Began implementing the occam intrinsics (some of them, at least) in the C support header
parent 7337978dcf
commit 0189ba3793
@@ -160,7 +160,8 @@ generateC = generate cgenOps

 cgenTopLevel :: A.AST -> CGen ()
 cgenTopLevel s
-    =  do tell ["#include <tock_support_cif.h>\n"]
+    =  do tell ["#define occam_INT_size ", show cIntSize,"\n"]
+          tell ["#include <tock_support_cif.h>\n"]
           cs <- getCompState
           (tlpName, tlpChans) <- tlpInterface
           chans <- sequence [csmLift $ makeNonce "tlp_channel" | _ <- tlpChans]
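Together with the matching C++CSP backend change below, the effect is that every generated file now pins down the occam INT width before pulling in the support header, which is what lets the header pick its integer types (see the tock_support changes further down). A minimal sketch of the emitted preamble, assuming cIntSize evaluates to 4 on the build machine:

#define occam_INT_size 4
#include <tock_support_cif.h>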
@@ -134,7 +134,8 @@ cppcspPrereq = cCppCommonPreReq ++ [Prop.allChansToAnyOrProtocol]
 -- | Generates the top-level code for an AST.
 cppgenTopLevel :: A.AST -> CGen ()
 cppgenTopLevel s
-    =  do tell ["#include <tock_support_cppcsp.h>\n"]
+    =  do tell ["#define occam_INT_size ", show cxxIntSize,"\n"]
+          tell ["#include <tock_support_cppcsp.h>\n"]
           --In future, these declarations could be moved to a header file:
           sequence_ $ map (call genForwardDeclaration) (listify (const True :: A.Specification -> Bool) s)
           call genStructured s (\m _ -> tell ["\n#error Invalid top-level item: ",show m])
@@ -2,6 +2,7 @@ int g_stopped;

 #define occam_stop(pos, nargs, format, args...) do { g_stopped = 1; } while (0)

+#define occam_INT_size SIZEOF_INT
 #include "support/tock_support.h"

 #define report_failure(msg, args...) { printf(msg, ##args); }
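In this test harness the width comes from SIZEOF_INT, which is presumably supplied by the build system's configure step (an assumption; the diff itself does not show where it is defined). Whatever provides it, the value drives the type selection added to the header below: 4 picks int32_t/uint32_t, 8 picks int64_t/uint64_t, and anything else trips the #error.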
@@ -406,6 +406,85 @@ static inline double occam_DSQRT (double, const char *) occam_unused;
 static inline double occam_DSQRT (double v, const char *pos) {
 	return sqrt (v);
 }

+//We use #define so we can #undef afterwards
+#if occam_INT_size == 4
+	#define INT int32_t
+	#define UINT uint32_t
+#elif occam_INT_size == 8
+	#define INT int64_t
+	#define UINT uint64_t
+#else
+	#error You must define occam_INT_size when using this header
+#endif
+//Note that in the occam 2 manual, there is an error on page 104.
+//They say that range is the number of storable values in INTEGER,
+//their conceptual infinite type, so range would be infinity!
+//However, it is clear from the following lines that range is
+//the number of storable values in INT.
+
+#define occam_unsign(x) ((UINT)(x))
+#define occam_sign(x) ((INT)(x))
+
+static inline INT occam_LONGADD (INT, INT, INT, const char *) occam_unused;
+static inline INT occam_LONGADD (INT left, INT right, INT carry_in, const char *pos) {
+	if (left == __MAX(INT)) {
+		if (right == __MAX(INT)) {
+			occam_stop(pos, 3, "Overflow in LONGADD: %d + %d", left, right);
+		} else right += carry_in & 1;
+	} else left += carry_in & 1;
+
+	if (((right<1)&&(__MIN(INT)-right<=left)) || ((right>=1)&&(__MAX(INT)-right>=left))) {
+		return left + right;
+	} else {
+		occam_stop(pos, 3, "Overflow in LONGADD: %d + %d", left, right);
+	}
+}
+
+static inline INT occam_LONGSUM (INT, INT, INT, INT*, const char *) occam_unused;
+static inline INT occam_LONGSUM (INT left, INT right, INT carry_in, INT* result1, const char *pos) {
+	UINT leftu = occam_unsign(left);
+	UINT rightu = occam_unsign(right);
+	if (leftu == __MAX(UINT)) {
+		if (rightu == __MAX(UINT)) {
+			*result1 = -2;
+			return 1;
+		} else rightu += carry_in & 1;
+	} else leftu += carry_in & 1;
+
+	if (__MAX(UINT)-rightu>=leftu) {
+		*result1 = occam_sign(leftu + rightu);
+		return 0;
+	} else {
+		*result1 = occam_sign(leftu + rightu);
+		return 1;
+	}
+}
+
+static inline INT occam_NORMALISE (INT, INT, INT*, INT*, const char *) occam_unused;
+static inline INT occam_NORMALISE (INT hi_in, INT lo_in, INT* result1, INT* result2, const char *pos) {
+	if (hi_in == 0 && lo_in == 0) {
+		*result1 = *result2 = 0;
+		return 2*CHAR_BIT*sizeof(INT);
+	} else {
+		const INT highest_bit = __MIN(INT);
+		INT hi = hi_in;
+		INT lo = lo_in;
+		INT places = 0;
+		while ((hi & highest_bit) == 0) {
+			hi <<= 1;
+			hi |= (lo & highest_bit) >> ((CHAR_BIT*sizeof(INT))-1);
+			lo <<= 1;
+			places++;
+		}
+		*result1 = hi;
+		*result2 = lo;
+		return places;
+	}
+}
+#undef INT
+#undef UINT
+
+//}}}

 //{{{ Terminal handling
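The three routines above correspond to occam 2's long-arithmetic predefines, so a short usage sketch may help make the carry and result conventions concrete. This is not part of the commit: it assumes the modified header is reachable as "support/tock_support.h", that the platform's C int is 4 bytes so INT resolves to int32_t, and it borrows the harness-style occam_stop from the test header above, which records an error in g_stopped instead of aborting.

#include <stdio.h>
#include <stdint.h>

int g_stopped;
#define occam_stop(pos, nargs, format, args...) do { g_stopped = 1; } while (0)
#define occam_INT_size 4              /* assumption: 32-bit int target */
#include "support/tock_support.h"     /* path as used by the test header above */

int main(void) {
	int32_t word = 0, hi = 0, lo = 0;
	int carry;

	/* LONGADD: checked signed addition with a carry-in bit; overflow calls occam_stop. */
	printf("LONGADD:   %d\n", (int) occam_LONGADD(1000000000, 1000000000, 1, ""));   /* 2000000001 */

	/* LONGSUM: adds its operands as unsigned words, returning the carry-out and
	   writing the (possibly wrapped) low word through result1.
	   Here 0xFFFFFFFF + 1 gives carry 1 and low word 0. */
	carry = (int) occam_LONGSUM(-1, 1, 0, &word, "");
	printf("LONGSUM:   carry %d, word %d\n", carry, (int) word);

	/* NORMALISE: shifts the double-word value hi:lo left until the top bit of hi
	   is set and returns the shift count; for hi=1, lo=0 that is 31 places. */
	printf("NORMALISE: %d places\n", (int) occam_NORMALISE(1, 0, &hi, &lo, ""));

	/* Nothing overflowed above, so the harness flag is still clear. */
	printf("stopped:   %d\n", g_stopped);
	return 0;
}

Built with GCC (the args... macro form used by the harness is a GNU extension), this should print 2000000001, carry 1 with word 0, 31 places, and stopped 0.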