reformat
parent 207fd90233
commit 8c226f5f31
@@ -7,11 +7,11 @@ BinPackParameters: 'false'
 BreakBeforeBraces: Allman
 ColumnLimit: '120'
 IndentPPDirectives: BeforeHash
-IndentWidth: '2'
+IndentWidth: '4'
 Language: Cpp
 PointerAlignment: Right
 SortIncludes: 'false'
-TabWidth: '2'
-UseTab: ForContinuationAndIndentation
+TabWidth: '4'
+UseTab: AlignWithSpaces
 
 ...
rtcore.h (134 changed lines)
@@ -22,7 +22,7 @@
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
-*/
+*/
 
 /* My base layer library.
 * - fixed width types
@@ -153,12 +153,14 @@ typedef struct arena_cp
 byte *cp;
 } arena_cp;
 
-static force_inline arena_cp SaveArena(arena a)
+static force_inline arena_cp
+SaveArena(arena a)
 {
 return (arena_cp){a.begin};
 }
 
-static force_inline void RestoreArena(arena *a, arena_cp cp)
+static force_inline void
+RestoreArena(arena *a, arena_cp cp)
 {
 a->begin = cp.cp;
 }
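Usage sketch for the checkpoint pair above (illustrative only, not part of this commit; assumes rtcore.h is included and a caller-owned scratch arena):

    static void TempWork(arena *scratch)
    {
        arena_cp cp = SaveArena(*scratch);   /* remember the current fill level */
        int *tmp = alloc(scratch, int, 256); /* temporary allocation from the arena */
        /* ... use tmp ... */
        (void)tmp;
        RestoreArena(scratch, cp); /* everything allocated since the save is reclaimed */
    }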
@@ -179,9 +181,9 @@ enum
 */
 #define alloc(...) allocx(__VA_ARGS__, alloc4, alloc3, alloc2)(__VA_ARGS__)
 #define allocx(a, b, c, d, e, ...) e
-#define alloc2(a, t) (t*)ArenaAlloc(a, isizeof(t), _Alignof(t), 1, 0)
-#define alloc3(a, t, n) (t*)ArenaAlloc(a, isizeof(t), _Alignof(t), n, 0)
-#define alloc4(a, t, n, f) (t*)ArenaAlloc(a, isizeof(t), _Alignof(t), n, f)
+#define alloc2(a, t) (t *)ArenaAlloc(a, isizeof(t), _Alignof(t), 1, 0)
+#define alloc3(a, t, n) (t *)ArenaAlloc(a, isizeof(t), _Alignof(t), n, 0)
+#define alloc4(a, t, n, f) (t *)ArenaAlloc(a, isizeof(t), _Alignof(t), n, f)
 
 RTC_API void *ArenaAlloc(arena *a, isize size, isize align, isize n, int flags);
 
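The alloc macro above overloads on argument count: the names appended by alloc(...) shift through allocx's parameters so that its fifth argument e resolves to alloc2, alloc3, or alloc4. Illustrative call shapes (a is a placeholder arena *, node a placeholder struct, not library code):

    node *n    = alloc(a, node);          /* 2 args -> alloc2: a single object      */
    i32  *xs   = alloc(a, i32, 128);      /* 3 args -> alloc3: an array of 128      */
    byte *buf  = alloc(a, byte, 4096, 0); /* 4 args -> alloc4: explicit flags value */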
@@ -194,10 +196,14 @@ typedef struct s8
 #define S8(_s) \
 (s8) { .data = (u8 *)_s, .length = lengthof(_s), }
 
-typedef struct { s8 first; s8 second; } split_result;
+typedef struct
+{
+s8 first;
+s8 second;
+} split_result;
 
 /* constructs a string containing the bytes between begin and end (exclusive) */
-RTC_API s8 S8Span(u8* begin, u8* end);
+RTC_API s8 S8Span(u8 *begin, u8 *end);
 /* check if two strings are equal */
 RTC_API b32 S8Equals(s8 a, s8 b);
 /* compare two strings. analogue to strcmp */
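Illustrative use of the s8 helpers touched here (variable names are not from the library; assumes rtcore.h is included):

    u8 raw[]  = "hello";
    s8 word   = S8Span(raw, raw + 5);        /* view over the bytes [begin, end) */
    b32 same  = S8Equals(word, S8("hello")); /* non-zero when the bytes match    */
    split_result kv = S8Split2(S8("key=value"), '='); /* yields kv.first and kv.second */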
@@ -230,8 +236,16 @@ RTC_API split_result S8Split2(s8 s, u8 c);
 /* Creates a clone of string s on arena a */
 RTC_API s8 S8Clone(s8 s, arena *a);
 
-typedef struct { i64 i; b32 ok; } s8_parse_i64_result;
-typedef struct { i32 i; b32 ok; } s8_parse_i32_result;
+typedef struct
+{
+i64 i;
+b32 ok;
+} s8_parse_i64_result;
+typedef struct
+{
+i32 i;
+b32 ok;
+} s8_parse_i32_result;
 
 /* Parses a integer from string s */
 RTC_API s8_parse_i64_result S8ParseI64(s8 s, int base);
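Usage of the widened result structs is unchanged; an illustrative check of the ok flag:

    s8_parse_i64_result r = S8ParseI64(S8("12345"), 10);
    if (r.ok)
    {
        i64 value = r.i; /* 12345 */
        (void)value;
    }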
@@ -273,48 +287,40 @@ RTC_API b32 WriteEntireFile(s8 path, byte *data, isize length);
 #define AtomicLoad(_ptr) _InterlockedOr(_ptr, 0)
 #define AtomicLoadAcquire(_ptr) _InterlockedOr_HLEAcquire(_ptr, 0)
 #elif defined(__TINYC__)
-#define AtomicInc32(_addend) do { \
-__asm__ volatile( \
-"lock incl %0" \
-: "+m" (*_addend) \
-); \
+#define AtomicInc32(_addend) \
+do \
+{ \
+__asm__ volatile("lock incl %0" : "+m"(*_addend)); \
 } while (0)
-#define AtomicInc64(_addend) do { \
-__asm__ volatile( \
-"lock incq %0" \
-: "+m" (*_addend) \
-); \
+#define AtomicInc64(_addend) \
+do \
+{ \
+__asm__ volatile("lock incq %0" : "+m"(*_addend)); \
 } while (0)
-#define AtomicAdd32(_addend, _val) do { \
-__asm__ volatile( \
-"lock addl %1, %0" \
-: "+m" (*_addend) \
-: "r" (_val) \
-); \
+#define AtomicAdd32(_addend, _val) \
+do \
+{ \
+__asm__ volatile("lock addl %1, %0" : "+m"(*_addend) : "r"(_val)); \
 } while (0)
-#define AtomicAdd64(_addend, _val) do { \
-__asm__ volatile( \
-"lock addq %1, %0" \
-: "+m" (*_addend) \
-: "r" (_val) \
-); \
+#define AtomicAdd64(_addend, _val) \
+do \
+{ \
+__asm__ volatile("lock addq %1, %0" : "+m"(*_addend) : "r"(_val)); \
 } while (0)
 /* This uses mov followed by mfence to ensure that
 * the store becomes globally visible to any subsequent load or store. */
-#define AtomicStore(_ptr, _val) do { \
-__asm__ volatile( \
-"movl %1, %0;" \
+#define AtomicStore(_ptr, _val) \
+do \
+{ \
+__asm__ volatile("movl %1, %0;" \
 "mfence;" \
-: "=m" (*_ptr) \
-: "r" (_val) \
-); \
-} while(0)
-#define AtomicStoreRelease(_ptr, _val) do { \
-__asm__ volatile( \
-"movl %1, %0" \
-: "=m" (*_ptr) \
-: "r" (_val) \
-); \
+: "=m"(*_ptr) \
+: "r"(_val)); \
 } while (0)
+#define AtomicStoreRelease(_ptr, _val) \
+do \
+{ \
+__asm__ volatile("movl %1, %0" : "=m"(*_ptr) : "r"(_val)); \
+} while (0)
 /* NOTE(Kevin): This should always compile to a mov, which is what we want. */
 #define AtomicLoad(_ptr) (*(_ptr))
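The mov + mfence comment above describes a publish/observe pattern; a sketch built only from the wrappers shown in this hunk (the globals and function names are hypothetical, and the 32-bit store implies 32-bit operands):

    static i32 g_payload;
    static i32 g_ready;

    static void Publish(void)
    {
        g_payload = 42;
        AtomicStore(&g_ready, 1); /* mov + mfence; payload store precedes the flag on x86 */
    }

    static void Consume(void)
    {
        while (!AtomicLoad(&g_ready)) { /* spin until published */ }
        i32 seen = g_payload;
        (void)seen;
    }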
@@ -334,30 +340,34 @@ RTC_API b32 WriteEntireFile(s8 path, byte *data, isize length);
 #define PopCount32(_x) __builtin_popcount(_x)
 #define PopCount64(_x) __builtin_popcountl(_x)
 #elif defined(_MSC_VER)
-static force_inline unsigned int CTZ32(u32 x)
-{
+static force_inline unsigned int
+CTZ32(u32 x)
+{
 unsigned int index;
 _BitScanReverse(&index, x);
 return index;
-}
-static force_inline unsigned int CTZ64(u64 x)
-{
+}
+static force_inline unsigned int
+CTZ64(u64 x)
+{
 unsigned int index;
 _BitScanReverse64(&index, x);
 return index;
-}
-static force_inline unsigned int CLZ32(u32 x)
-{
+}
+static force_inline unsigned int
+CLZ32(u32 x)
+{
 unsigned int index;
 _BitScanForward(&index, x);
 return index;
-}
-static force_inline unsigned int CLZ64(u64 x)
-{
+}
+static force_inline unsigned int
+CLZ64(u64 x)
+{
 unsigned int index;
 _BitScanForward64(&index, x);
 return index;
-}
+}
 #define PopCount32(_x) __popcnt(_x)
 #define PopCount64(_x) __popcnt64(_x)
 #else
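An illustrative use of the PopCount64 wrapper above (the value is arbitrary):

    u64 occupancy = 0xF0F0F0F0F0F0F0F0ull;
    int live = (int)PopCount64(occupancy); /* 32 bits set */
    (void)live;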
@@ -371,9 +381,9 @@ typedef struct thread thread;
 /* Win32 uses DWORD as the return type, Linux uses void *.
 * I don't think that I've ever used a threads return value... */
 #ifdef _WIN32
-#define THREAD_RETURN_TYPE u32
+#define THREAD_RETURN_TYPE u32
 #elif defined(__linux__)
-#define THREAD_RETURN_TYPE void *
+#define THREAD_RETURN_TYPE void *
 #endif
 
 /* Generates a thread entry point */
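A sketch of a portable entry point written directly against THREAD_RETURN_TYPE (the entry-point generator macro mentioned in the comment is not shown in this hunk, so it is not used; WorkerMain is an illustrative name):

    static THREAD_RETURN_TYPE WorkerMain(void *param)
    {
        (void)param;
        /* ... do work ... */
        return (THREAD_RETURN_TYPE)0; /* 0 on Win32, NULL on Linux */
    }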
@@ -418,7 +428,7 @@ ArenaAlloc(arena *a, isize size, isize align, isize n, int flags)
 
 /* S8 funcs */
 RTC_API s8
-S8Span(u8* begin, u8* end)
+S8Span(u8 *begin, u8 *end)
 {
 s8 s = {0};
 s.data = begin;
@@ -679,8 +689,7 @@ JoinThread(thread *t)
 RTC_API thread *
 StartThread(thread_fn *fn, void *param)
 {
-HANDLE h = CreateThread(
-NULL,
+HANDLE h = CreateThread(NULL,
 0, /* Use default stack size */
 (LPTHREAD_START_ROUTINE)fn,
 param,
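Illustrative pairing with the WorkerMain sketch above, assuming thread_fn matches that signature (JoinThread is taken from this hunk's context line):

    thread *t = StartThread(WorkerMain, NULL); /* forwarded to CreateThread on Win32 */
    /* ... */
    JoinThread(t);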
@@ -705,4 +714,3 @@ JoinThread(thread *t)
 #endif
 
 #endif
-