I have gone through many posts but still couldn't find a solution to my problem. I am trying to write a lexical analyzer that reads an entire C file and generates tokens from it. So far I have split the file into lines, stored the lines in an array, and then extracted words from those lines with strtok. I have also made separate arrays, one for operators, one for delimiters, and one for keywords, so that I can match each extracted word against them and work out exactly what kind of token it is (there is a rough sketch of the matching step I have in mind after my code below).
Here is my code. Please guide me in doing this, as I can't find the way forward.
Code:
#include <stdio.h>
#include <string.h>

char line[300][200];
char delim[] = " \n\t}{][)(,;*";

void lex(char filename[50]);   /* prototype so main() knows about lex() */

int main()
{
    int i = 0, j = 0;
    FILE *fp1;
    char sourcename[20];
    char *words[1000];
    char *token;

    /*
    printf("\n\n ENTER SOURCE FILE NAME\n");
    scanf("%s", sourcename);
    lex(sourcename);
    */

    fp1 = fopen("demo.c", "r");
    if (fp1 == NULL)
    {
        printf("\n FILE NOT FOUND \n");
        return 1;
    }

    /* read the file line by line and split each line into words */
    while (i < 300 && fgets(&line[i][0], sizeof(line[i]), fp1) != NULL)
    {
        token = strtok(&line[i][0], delim);
        while (token != NULL)
        {
            words[j++] = token;           /* keep a pointer to each word */
            token = strtok(NULL, delim);
        }
        i++;
    }
    fclose(fp1);

    /* print only the words that were actually collected, not a fixed count */
    for (i = 0; i < j; i++)
    {
        printf("%s\n", words[i]);
    }
    return 0;
}

void lex(char filename[50])
{
    FILE *fp = fopen(filename, "r");   /* fp was tested without ever being opened */
    if (fp == NULL)
    {
        printf("\n FILE NOT FOUND \n");
        return;
    }
    fclose(fp);
}
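To make clearer what I mean by matching, here is a rough sketch of the kind of check I am trying to write for each extracted word. The keywords list, the operators string, and the classify_token() name are just placeholders I made up for illustration; they are not part of my program yet.

Code:
#include <stdio.h>
#include <string.h>

/* small example keyword list; a real lexer would hold all C keywords */
const char *keywords[] = { "int", "char", "float", "if", "else", "while", "return" };
const char operators[] = "+-*/=<>!%&|";

/* classify_token() is a made-up helper name for this sketch */
const char *classify_token(const char *word)
{
    size_t k;
    for (k = 0; k < sizeof(keywords) / sizeof(keywords[0]); k++)
    {
        if (strcmp(word, keywords[k]) == 0)
            return "keyword";
    }
    /* a single character found in the operator set counts as an operator */
    if (strlen(word) == 1 && strchr(operators, word[0]) != NULL)
        return "operator";
    return "identifier or literal";
}

int main(void)
{
    const char *samples[] = { "int", "main", "+", "42" };
    size_t k;
    for (k = 0; k < sizeof(samples) / sizeof(samples[0]); k++)
        printf("%-6s -> %s\n", samples[k], classify_token(samples[k]));
    return 0;
}

Something like this, called on every entry of words[], is what I want to end up with, but I am not sure this is the right way to organise it.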