Hi,
Does anyone have any ideas about the following program (used with LabVIEW)? I need your help, please.
// This routine reads in one data sample.
// It is assumed that a previous high signal has already been sent to
// the microcontroller and that data is ready to read.
// The start-conversion signal is sent at the end of the procedure,
// assuming the next conversion will follow.
// This procedure should be used only when making a series of conversions.
// The first data value should be discarded.
// Read one converted sample from the ADC via the parallel-port registers.
//
// Preconditions: a conversion has already been started and the interrupt
// handler sets int_occured when the data is ready (int_occured should be
// a volatile flag shared with the ISR -- TODO confirm its declaration).
// A new conversion is triggered at the end, so this is intended for a
// series of conversions; the first sample should be discarded.
//
// Returns: the assembled 12-bit conversion result.
int getdata(void)
{
unsigned int status_bits, low_byte = 0, sample;

outportb(CONTROL, 0x31); // set control bit 0 (0x31 has bit 0 = 1) -- verify against hardware doc
while (!int_occured)     // busy-wait for the data-ready interrupt
    ;

status_bits = inportb(STATUS) >> 0x03; // only the upper 5 status bits are meaningful
low_byte = inportb(DATA);              // all 8 data bits are used

// After the shift, original status bit 7 is at position 4 (0x10) and it
// arrives inverted; original bit 6 is only the end-of-conversion signal.
// Rebuild the true top nibble: keep bits 0-2 and set bit 3 from the
// INVERSE of the original bit 7, discarding the end-of-conversion bit.
if (status_bits & 0x10)
    status_bits = status_bits & 0x07;
else
    status_bits = (status_bits & 0x07) | 0x08;

sample = (status_bits << 0x08) | low_byte; // the final 12-bit data

int_occured = 0;          // re-arm the ready flag for the next interrupt
outportb(CONTROL, 0x30);  // clear control bit 0: starts the next conversion
return (sample);
} // BUG FIX: this closing brace was missing in the original (L25 was a stray "//")
/* Install the interrupt service routine for the data-ready interrupt.
 *
 * Saves the current vector for interrupt level `intlev` in `oldfunc`,
 * points that vector at `intserv`, and clears bit 7 of the interrupt
 * mask register at port 0x21 (presumably the master 8259 PIC, i.e.
 * unmasking IRQ7 -- TODO confirm against the hardware setup).
 * Interrupts are disabled while the vector is being swapped.
 */
void open_intserv(void)
{
int pic_mask;

disable();                         /* no interrupts while swapping vectors */
oldfunc = getvect(intlev);         /* remember the old handler */
setvect(intlev, intserv);          /* install ours */
pic_mask = inportb(0x21);
outportb(0x21, pic_mask & ~0x80);  /* clear bit 7: unmask our IRQ line */
enable();
}