Short Description
* duff.c Duff's Device, a fast copy algorithm (see the sketch below)
* crypt.c the salt was too short
* files.c mass directory creation
* fork.c a small fork demo (see the sketch below)
committer: Markus Bröker <mbroeker@largo.homelinux.org>
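For reference, a minimal sketch of Duff's Device as a copy loop. The function name duff_copy and its exact shape are illustrative assumptions, not taken from duff.c; the idea is that the switch jumps into the middle of an eight-fold unrolled loop, so the remainder bytes need no separate cleanup loop.

#include <stddef.h>

/* Hypothetical sketch: copy count bytes from src to dst using
 * Duff's Device (eight-fold unrolling, switch handles the remainder). */
void duff_copy (char *dst, const char *src, size_t count)
{
    size_t rounds = (count + 7) / 8;

    if (count == 0)
        return;

    switch (count % 8) {
    case 0: do { *dst++ = *src++;
    case 7:      *dst++ = *src++;
    case 6:      *dst++ = *src++;
    case 5:      *dst++ = *src++;
    case 4:      *dst++ = *src++;
    case 3:      *dst++ = *src++;
    case 2:      *dst++ = *src++;
    case 1:      *dst++ = *src++;
        } while (--rounds > 0);
    }
}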
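Likewise, a small fork/wait sketch of the kind fork.c presumably contains; this is an illustrative stand-in, not the demo itself.

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

/* Minimal fork demo: the parent waits for the child to terminate. */
int main (void)
{
    pid_t pid = fork ();

    if (pid < 0) {
        perror ("fork");
        return EXIT_FAILURE;
    }

    if (pid == 0) {
        printf ("child:  pid=%d\n", getpid ());
        return EXIT_SUCCESS;
    }

    printf ("parent: pid=%d, child=%d\n", getpid (), pid);
    waitpid (pid, NULL, 0);

    return EXIT_SUCCESS;
}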
/**
 * test/demos/lsflib/src/tokenizer.c
 * Copyright (C) 2008 Markus Broeker
 */
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <ctype.h>
#include <string.h>
#include <lsf.h>
Node *tokenizer (FILE * f, const char *delim)
{
    /*
     * Reads the stream line by line, splits every line at the given
     * delimiters and appends each token to a singly linked list.
     */
    Node *aktuell, *begin;
    char line[LINE_LENGTH];
    char *token;

    /* dummy head node; it is unlinked and freed before returning */
    aktuell = begin = addnode (NULL, "NULL");

    while (fgets (line, LINE_LENGTH, f) != NULL) {
        token = strtok (line, delim);
        while (token) {
            aktuell = addnode (aktuell, token);
            token = strtok (NULL, delim);
        }
    }

    rewind (f);

    /* drop the dummy head and return the first real token node */
    aktuell = begin->next;
    free (begin->data);
    free (begin);

    return aktuell;
}
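A minimal caller might look like the sketch below. It assumes that lsf.h declares tokenizer() and a Node type with the data and next members used above, and that the last node's next pointer is NULL; the delimiter string and the missing cleanup of the list are illustrative simplifications.

#include <stdio.h>
#include <stdlib.h>
#include <lsf.h>

/* Hypothetical usage: print every whitespace-separated token of a file. */
int main (int argc, char **argv)
{
    FILE *f;
    Node *list, *node;

    if (argc != 2) {
        fprintf (stderr, "Usage: %s <file>\n", argv[0]);
        return EXIT_FAILURE;
    }

    if ((f = fopen (argv[1], "r")) == NULL) {
        perror (argv[1]);
        return EXIT_FAILURE;
    }

    list = tokenizer (f, " \t\n");
    fclose (f);

    for (node = list; node != NULL; node = node->next)
        printf ("%s\n", (char *) node->data);

    /* freeing the list is omitted here; lsf.h may provide a helper */
    return EXIT_SUCCESS;
}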