Merge branch 'develop' into bugfix/documentation/doxygen-test
commit 636b378ce7
@@ -315,8 +315,8 @@ clean:

distclean:
@for TARG in `ls $(CONTIKI)/arch/platform $(TARGETDIRS)`; do \
echo Running: make TARGET=$$TARG clean; \
make TARGET=$$TARG clean; \
echo Running: $(MAKE) TARGET=$$TARG clean; \
$(MAKE) TARGET=$$TARG clean; \
done

-include $(CONTIKI)/arch/platform/$(TARGET)/Makefile.customrules-$(TARGET)
@@ -446,6 +446,7 @@ savedefines:
@echo "saving Makefile.$(TARGET).defines"
@echo >Makefile.$(TARGET).defines "DEFINES = $(DEFINES)"

VIEWCONF = $(CONTIKI)/tools/viewconf/viewconf.c
viewconf:
@echo "----------------- Make variables: --------------"
@echo "##### \"TARGET\": ________________________________ $(TARGET)"

@@ -457,13 +458,13 @@ ifdef MAKE_COAP_DTLS_KEYSTORE
@echo "##### \"MAKE_COAP_DTLS_KEYSTORE\": _______________ $(MAKE_COAP_DTLS_KEYSTORE)"
endif
@echo "----------------- C variables: -----------------"
$(Q)$(CC) $(CFLAGS) -E $(CONTIKI)/tools/viewconf.c | grep \#\#\#\#\#
$(Q)$(CC) $(CFLAGS) -E $(VIEWCONF) | grep \#\#\#\#\#
@echo "------------------------------------------------"
@echo "'==' Means the flag is set to a given a value"
@echo "'->' Means the flag is unset, but will default to a given value"
@echo "'><' Means the flag is unset and has no default value"
@echo "To view more Make variables, edit $(CONTIKI)/Makefile.include, rule 'viewconf'"
@echo "To view more C variables, edit $(CONTIKI)/tools/viewconf.c"
@echo "To view more C variables, edit $(VIEWCONF)"

### Include Makefile.embedded for relevant platforms, in order to pull in
### rules for login, serialview etc
@@ -11,7 +11,7 @@ TUNSLIP6 = $(SERIAL_IO_TOOL_DIR)/tunslip6
SERIAL_DUMP_BIN = $(SERIAL_IO_TOOL_DIR)/serialdump

$(SERIAL_DUMP_BIN): $(SERIAL_IO_TOOL_DIR)/serialdump.c $(SERIAL_IO_TOOL_DEPS)
make -C $(SERIAL_IO_TOOL_DIR) serialdump
$(MAKE) -C $(SERIAL_IO_TOOL_DIR) serialdump

$(TUNSLIP6): $(SERIAL_IO_TOOL_DIR)/tunslip6.c $(SERIAL_IO_TOOL_DEPS)
make -C $(SERIAL_IO_TOOL_DIR) tunslip6
$(MAKE) -C $(SERIAL_IO_TOOL_DIR) tunslip6
@@ -3,4 +3,7 @@ all: $(CONTIKI_PROJECT)

MODULES += os/services/shell
CONTIKI = ../../..

PLATFORMS_EXCLUDE = sky

include $(CONTIKI)/Makefile.include
@@ -94,11 +94,12 @@ set_global_address(uip_ipaddr_t *prefix, uip_ipaddr_t *iid)

uip_ds6_addr_add(&root_ipaddr, 0, ADDR_AUTOCONF);

LOG_INFO("IPv6 addresses: ");
LOG_INFO("IPv6 addresses:\n");
for(i = 0; i < UIP_DS6_ADDR_NB; i++) {
state = uip_ds6_if.addr_list[i].state;
if(uip_ds6_if.addr_list[i].isused &&
(state == ADDR_TENTATIVE || state == ADDR_PREFERRED)) {
LOG_INFO("-- ");
LOG_INFO_6ADDR(&uip_ds6_if.addr_list[i].ipaddr);
LOG_INFO_("\n");
}
@@ -104,7 +104,7 @@ update_state(void)
/* how many more IP neighbors can be have? */
num_free = NBR_TABLE_MAX_NEIGHBORS - num_used;

LOG_INFO("nbr-policy: free: %d, parents: %d\n", num_free, num_parents);
LOG_DBG("nbr-policy: free: %d, parents: %d\n", num_free, num_parents);
}
/*---------------------------------------------------------------------------*/
static const linkaddr_t *
@@ -127,12 +127,12 @@ find_removable_dio(uip_ipaddr_t *from, rpl_dio_t *dio)
/* Add the new neighbor only if it is better than the current worst. */
if(dio->rank + curr_instance.min_hoprankinc < worst_rank - curr_instance.min_hoprankinc / 2) {
/* Found *great* neighbor - add! */
LOG_INFO("nbr-policy: DIO rank %u, worse_rank %u -- add to cache\n",
LOG_DBG("nbr-policy: DIO rank %u, worst_rank %u -- add to cache\n",
dio->rank, worst_rank);
return worst_rank_nbr_lladdr;
}

LOG_INFO("nbr-policy: DIO rank %u, worse_rank %u -- do not add to cache\n",
LOG_DBG("nbr-policy: DIO rank %u, worst_rank %u -- do not add to cache\n",
dio->rank, worst_rank);
return NULL;
}
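(A quick worked example of the acceptance test in the hunk above, with invented numbers: if curr_instance.min_hoprankinc is 128 and worst_rank is 512, a DIO advertising rank 300 gives 300 + 128 = 428 on the left and 512 - 64 = 448 on the right, so 428 < 448 and the neighbor replaces the current worst; a DIO with rank 330 gives 458, which is not below 448, so it is not added.)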
@@ -405,9 +405,9 @@ rpl_neighbor_select_best(void)
/* The best is not fresh. Probe it (unless there is already an urgent
probing target). We will be called back after the probing anyway. */
if(curr_instance.dag.urgent_probing_target == NULL) {
LOG_WARN("best parent is not fresh, schedule urgent probing to ");
LOG_WARN_6ADDR(rpl_neighbor_get_ipaddr(best));
LOG_WARN_("\n");
LOG_INFO("best parent is not fresh, schedule urgent probing to ");
LOG_INFO_6ADDR(rpl_neighbor_get_ipaddr(best));
LOG_INFO_("\n");
curr_instance.dag.urgent_probing_target = best;
rpl_schedule_probing_now();
}
@@ -22,10 +22,9 @@ java -Xshare:on -jar $CONTIKI/tools/cooja/dist/cooja.jar -nogui=$BASENAME.csc -c
JPID=$!
sleep 20

# Connect to the simlation
# Connect to the simulation
echo "Starting tunslip6"
make -C $CONTIKI/tools tunslip6
make -C $CONTIKI/examples/rpl-border-router/ connect-router-cooja TARGET=zoul >> $BASENAME.tunslip.log 2>&1 &
make -C $CONTIKI/examples/rpl-border-router/ connect-router-cooja TARGET=zoul >> $BASENAME.tunslip6.log 2>&1 &
MPID=$!
printf "Waiting for network formation (%d seconds)\n" "$WAIT_TIME"
sleep $WAIT_TIME

@@ -49,7 +48,7 @@ if [ $STATUS -eq 0 ] && [ $HOPS -eq $TARGETHOPS ] ; then
printf "%-32s TEST OK\n" "$BASENAME" | tee $BASENAME.testlog;
else
echo "==== $BASENAME.coojalog ====" ; cat $BASENAME.coojalog;
echo "==== $BASENAME.tunslip.log ====" ; cat $BASENAME.tunslip.log;
echo "==== $BASENAME.tunslip6.log ====" ; cat $BASENAME.tunslip6.log;
echo "==== $BASENAME.scriptlog ====" ; cat $BASENAME.scriptlog;

printf "%-32s TEST FAIL\n" "$BASENAME" | tee $BASENAME.testlog;
@@ -28,10 +28,9 @@ java -Xshare:on -jar $CONTIKI/tools/cooja/dist/cooja.jar -nogui=$BASENAME.csc -c
JPID=$!
sleep 20

# Connect to the simlation
# Connect to the simulation
echo "Starting tunslip6"
make -C $CONTIKI/tools tunslip6
make -C $CONTIKI/examples/rpl-border-router/ connect-router-cooja TARGET=zoul >> $BASENAME.tunslip.log 2>&1 &
make -C $CONTIKI/examples/rpl-border-router/ connect-router-cooja TARGET=zoul >> $BASENAME.tunslip6.log 2>&1 &
MPID=$!
printf "Waiting for network formation (%d seconds)\n" "$WAIT_TIME"
sleep $WAIT_TIME

@@ -55,7 +54,7 @@ if [ $STATUS -eq 0 ] && [ $REPLIES -eq $COUNT ] ; then
printf "%-32s TEST OK\n" "$BASENAME" | tee $BASENAME.testlog;
else
echo "==== $BASENAME.coojalog ====" ; cat $BASENAME.coojalog;
echo "==== $BASENAME.tunslip.log ====" ; cat $BASENAME.tunslip.log;
echo "==== $BASENAME.tunslip6.log ====" ; cat $BASENAME.tunslip6.log;
echo "==== $BASENAME.scriptlog ====" ; cat $BASENAME.scriptlog;

printf "%-32s TEST FAIL\n" "$BASENAME" | tee $BASENAME.testlog;
@@ -28,7 +28,7 @@ java -Xshare:on -jar $CONTIKI/tools/cooja/dist/cooja.jar -nogui=$BASENAME.csc -c
JPID=$!
sleep 20

# Connect to the simlation
# Connect to the simulation
echo "Starting native border-router"
nohup make -C $CONTIKI/examples/rpl-border-router/ connect-router-cooja TARGET=native >> $BASENAME.nbr.log 2>&1 &
MPID=$!
@@ -36,7 +36,7 @@ summary: $(SUMMARIES)
@cat $(SUMMARIES) > $@

%/summary:
@make -C $* summary || true
@$(MAKE) -C $* summary || true

clean:
rm -f $(SUMMARIES) summary
@@ -40,7 +40,7 @@ get_target_vars = $(wordlist 2,15,$(subst :, ,$1))
define dooneexample
@echo -n Building example $(3): $(1) $(4) for target $(2)
@((cd $(EXAMPLESDIR)/$(1); \
make $(4) TARGET=$(2) clean && make -j $(4) TARGET=$(2) WERROR=1) > \
$(MAKE) $(4) TARGET=$(2) clean && make -j $(4) TARGET=$(2) WERROR=1) > \
/dev/null 2>make.err && \
(echo " -> OK" && printf "%-75s %-40s %-20s TEST OK\n" "$(1)" "$(4)" "$(2)" > $(3)-$(subst /,-,$(1))$(2).testlog) || \
(echo " -> FAIL" && printf "%-75s %-40s %-20s TEST FAIL\n" "$(1)" "$(4)" "$(2)" > $(3)-$(subst /,-,$(1))$(2).testlog ; cat make.err))

@@ -65,4 +65,4 @@ clean:
@rm -f *.testlog summary
@$(foreach example, $(EXAMPLES), \
$(foreach target, $(EXAMPLESTARGETS), \
(cd $(EXAMPLESDIR)/$(example); make TARGET=$(target) clean);))
(cd $(EXAMPLESDIR)/$(example); $(MAKE) TARGET=$(target) clean);))
tools/makefsdata
@@ -1,375 +0,0 @@
#!/usr/bin/perl
#Generate a .c source that preinitializes a file system
#David Kopf <dak664@embarqmail.com> July 2009
#Extended from the existing non-coffee tool

#The simplest format is used with httpd-fs.c. It contains a linked
#list pointing to file names and file contents.

#An extension uses the same linked list that points to a the file names
#and contents within a coffee file system having fixed file sizes.
#This allows the content to be rewritten by coffee while still being
#readable by httpd-fs.c New files or sector extension of existing files
#is not possible, but a larger initial sector allocation could be given
#to some files to allow a limited size increase. Since this would leave the
#wrong file size in the linked list, httpd-fs would have to terminate on
#a reverse zero scan like coffee does. Rewriting the linked list would
#probably be OK if in eeprom, but not if in program flash. Suggestions
#for a workaround would be welcome!

#Lastly a full coffee file system can be preinitialized. File reads must
#then be done using coffee.

#Assumes the coffee file_header structure is
#struct file_header {
# coffee_page_t log_page;
# uint16_t log_records;
# uint16_t log_record_size;
# coffee_page_t max_pages;
# uint8_t deprecated_eof_hint;
# uint8_t flags=3 for the initial value;
# char name[COFFEE_NAME_LENGTH];
# } __attribute__((packed));

goto DEFAULTS;
START:$version="1.1";

#Process options
for($n=0;$n<=$#ARGV;$n++) {
$arg=$ARGV[$n];
if ($arg eq "-v") {
print "makefsdata Version $version\n";
} elsif ($arg eq "-A") {
$n++;$attribute=$ARGV[$n];
} elsif ($arg eq "-C") {
$coffee=1;
} elsif ($arg eq "-c") {
$complement=1;
} elsif ($arg eq "-i") {
$n++;$includefile=$ARGV[$n];
# } elsif ($arg eq "-p") {
# $n++;$coffee_page_length=$ARGV[$n];
} elsif ($arg eq "-s") {
$n++;$coffee_sector_size=$ARGV[$n];
} elsif ($arg eq "-t") {
$n++;$coffee_page_t =$ARGV[$n];
} elsif ($arg eq "-f") {
$n++;$coffee_name_length=$ARGV[$n];
} elsif ($arg eq "-S") {
$n++;$sectionname=$ARGV[$n];
} elsif ($arg eq "-l") {
$linkedlist=1;
} elsif ($arg eq "-d") {
$n++;$directory=$ARGV[$n];
} elsif ($arg eq "-o") {
$n++;$outputfile=$ARGV[$n];
$coffeefile=$outputfile;
} else {
DEFAULTS:
#Set up defaults
$coffee=0;
#$coffee_page_length=256;
$coffee_sector_size=256;
$coffee_page_t=1;
$coffee_name_length=16;
$complement=0;
$directory="";
$outputfile="httpd-fsdata.c";
$coffeefile="httpd-coffeedata.c";
$includefile="makefsdata.h";
$linkedlist=0;
$attribute="";
$sectionname=".coffeefiles";
if (!$version) {goto START;}
print "\n";
print "Usage: makefsdata <option(s)> <-d input_directory> <-o output_file>\n\n";
print " Generates c source file to pre-initialize a contiki file system.\n";
print " The default output is a simple linked list readable by httpd-fs.c\n";
print " and written to $outputfile.\n\n";
print " The -C option makes a coffee file system to default output $coffeefile.\n";
print " A linked list can be still be generated for use with httpd-fs.c so long\n";
print " as coffee does not extend, delete, or add any files.\n\n";
print " The input directory structure is copied. If input_directory is specified\n";
print " it becomes the root \"/\". If no input_directory is specified the first\n";
print " subdirectory found in the current directory is used as the root.\n\n";
print " WARNING : If the output file exists it will be overwritten without confirmation!\n\n";
print " Options are:\n";
print " -v Display the version number\n";
print " -A attribute Append \"attribute\" to the declaration, e.g. PROGMEM to put data in AVR program flash memory\n";
print " -C Use coffee file system format\n";
print " -c Complement the data, useful for obscurity or fast page erases for coffee\n";
print " -i filename Treat any input files with name \"filename\" as include files.\n";
print " Useful for giving a server a name and ip address associated with the web content.\n";
print " The default is $includefile.\n\n";
print " The following apply only to coffee file system\n";
# print " -p pagesize Page size in bytes (default $coffee_page_length)\n";
print " -s sectorsize Sector size in bytes (default $coffee_sector_size)\n";
print " -t page_t Number of bytes in coffee_page_t (1,2,or 4, default $coffee_page_t)\n";
print " -f namesize File name field size in bytes (default $coffee_name_length)\n";
print " -S section Section name for data (default $sectionname)\n";
print " -l Append a linked list for use with httpd-fs\n";
exit;
}
}

#--------------------Configure parameters-----------------------
if ($coffee) {
$outputfile=$coffeefile;
$coffee_header_length=2*$coffee_page_t+$coffee_name_length+6;
if ($coffee_page_t==1) {
$coffeemax=0xff;
} elsif ($coffee_page_t==2) {
$coffeemax=0xffff;
} elsif ($coffee_page_t==4) {
$coffeemax=0xffffffff;
} else {
die "Unsupported coffee_page_t $coffee_page_t\n";
}
} else {
# $coffee_page_length=1;
$coffee_sector_size=1;
$linkedlist=1;
$coffee_name_length=256;
$coffee_max=0xffffffff;
$coffee_header_length=0;
}
$null="0x00";if ($complement) {$null="0xff";}
$tab=" "; #optional tabs or spaces at beginning of line, e.g. "\t\t"

#--------------------Create output file-------------------------
#awkward but could not figure out how to compare paths later unless the file exists -- dak
if (!open(OUTPUT, "> $outputfile")) {die "Aborted: Could not create output file $outputfile";}
print(OUTPUT "\n");
close($outputfile);
use Cwd qw(abs_path);
if (!open(OUTPUT, "> $outputfile")) {die "Aborted: Could not create output file $outputfile";}
$outputfile=abs_path($outputfile);

#--------------------Get a list of input files------------------
if ($directory eq "") {
opendir(DIR, ".");
@files = grep { !/^\./ && !/(CVS|~)/ } readdir(DIR);
closedir(DIR);
foreach $file (@files) {
if(-d $file && $file !~ /^\./) {
$directory=$file;
break;
}
}
}
if ($directory eq "") {die "Aborted: No subdirectory in current directory";}
if (!chdir("$directory")) {die "Aborted: Directory \"$directory\" does not exist!";}

if ($coffee) {
print "Processing directory $directory as root of coffee file system\n";
} else {
print "Processing directory $directory as root of packed httpd-fs file system\n";
}
opendir(DIR, ".");
@files = grep { !/^\./ && !/(CVS|~)/ && !/(makefsdata.ignore)/ } readdir(DIR);
closedir(DIR);

foreach $file (@files) {
if(-d $file && $file !~ /^\./) {
print "Adding subdirectory $file\n";
opendir(DIR, $file);
@newfiles = grep { !/^\./ && !/(CVS|~)/ } readdir(DIR);
closedir(DIR);
# printf "Adding files @newfiles\n";
@files = (@files, map { $_ = "$file/$_" } @newfiles);
next;
}
}
#--------------------Write the output file-------------------
print "Writing to $outputfile\n";
($DAY, $MONTH, $YEAR) = (localtime)[3,4,5];
printf(OUTPUT "/*********Generated by contiki/tools/makefsdata on %04d-%02d-%02d*********/\n", $YEAR+1900, $MONTH+1, $DAY);
if ($coffee) {
print(OUTPUT "/*For coffee filesystem of sector size $coffee_sector_size and header length $coffee_header_length bytes*/\n");
}
print(OUTPUT "\n");
#--------------------Process include file-------------------
foreach $file (@files) {if ($file eq $includefile) {
open(FILE, $file) || die "Aborted: Could not open include file $file\n";
print "Including text from $file\n";
$file_length= -s FILE;
read(FILE, $data, $file_length);
print OUTPUT ($data);
close(FILE);
next; #include all include file matches
# break; #include only first include file match
}}

#--------------------Process data files-------------------
$n=0;$coffeesize=0;$coffeesectors=0;
foreach $file (@files) {if(-f $file) {
if (length($file)>=($coffee_name_length-1)) {die "Aborted: File name $file is too long";}
if (abs_path("$file") eq abs_path("$outputfile")) {
print "Skipping output file $outputfile - recursive input NOT allowed\n";
next;
}
if ($file eq $includefile) {next;}
open(FILE, $file) || die "Aborted: Could not open file $file\n";
print "Adding /$file\n";
if (grep /.png/||/.jpg/||/jpeg/||/.pdf/||/.gif/||/.bin/||/.zip/,$file) {binmode FILE;}

$file_length= -s FILE;
$file =~ s-^-/-;
$fvar = $file;
$fvar =~ s-/-_-g;
$fvar =~ s-\.-_-g;

if ($coffee) {
$coffee_sectors=int(($coffee_header_length+$file_length+$coffee_sector_size-1)/$coffee_sector_size);
# $coffee_sectors=sprintf("%.0f",($coffee_header_length+$file_length+$coffee_sector_size-1)/$coffee_sector_size)-1;
$coffee_length=$coffee_sectors*$coffee_sector_size;
} else {
$coffee_length=$file_length+length($file)+1;
}
$flen[$n]=$file_length;
$clen[$n]=$coffee_length;
$n++;$coffeesectors+=$coffee_sectors;$coffeesize+=$coffee_length;
if ($coffee) {
if ($coffeesectors>$coffeemax) {
print "Warning: sector number $coffeesectors overflows allocated sector size in coffee header\n";
}
print(OUTPUT "\n__attribute__ ((section (\"$sectionname\")))\n");
print(OUTPUT "volatile const char data".$fvar."[$coffee_length] = {\n");
} else {
print(OUTPUT "\nconst char data".$fvar."[$coffee_length] $attribute = {\n");
}
print(OUTPUT "$tab/* $file */\n$tab");
#--------------------Header-----------------------------
#log_page
if ($coffee) {
print (OUTPUT " ");
for($j=0;$j<$coffee_page_t;$j++) {print (OUTPUT "$ null ,");}
#log_records, log_record_size
for($j=0;$j<4;$j++) {print (OUTPUT "$null, ");}
#max_pages
if ($complement) {$coffee_sectors=$coffee_sectors^0xffffffff;}
if ($coffee_page_t==1) {
printf(OUTPUT "0x%2.2x, ",($coffee_sectors )&0xff);
} elsif ($coffee_page_t==2) {
printf(OUTPUT "0x%2.2x, ",($coffee_sectors>> 8)&0xff);
printf(OUTPUT "0x%2.2x, ",($coffee_sectors )&0xff);
} elsif ($coffee_page_t==4) {
printf(OUTPUT "0x%2.2x, ",($coffee_sectors>>24)&0xff);
printf(OUTPUT "0x%2.2x, ",($coffee_sectors>>16)&0xff);
printf(OUTPUT "0x%2.2x, ",($coffee_sectors>> 8)&0xff);
printf(OUTPUT "0x%2.2x, ",($coffee_sectors )&0xff);
}
if ($complement) {$coffee_sectors=$coffee_sectors^0xffffffff;}
#eof hint and flags
if ($complement) {
print(OUTPUT "0xff, 0xfc,\n$tab");
} else {
print(OUTPUT "0x00, 0x03,\n$tab");
}
}

#-------------------File name--------------------------
for($j = 0; $j < length($file); $j++) {
$temp=unpack("C", substr($file, $j, 1));
if ($complement) {$temp=$temp^0xff;}
printf(OUTPUT " %#02.2x,", $temp);
}
if ($coffee) {
for(; $j < $coffee_name_length-1; $j++) {printf(OUTPUT " $null,");}
{print(OUTPUT " $null");}
} else {
{printf(OUTPUT " $null");}
}
#------------------File Data---------------------------
$coffee_length-=$coffee_header_length;
$i = 10;
while(read(FILE, $data, 1)) {
$temp=unpack("C", $data);
if ($complement) {$temp=$temp^0xff;}
if($i == 10) {
printf(OUTPUT ",\n$tab 0x%2.2x", $temp);
$i = 0;
} else {
printf(OUTPUT ", 0x%2.2x", $temp)
}
$i++;$coffee_length--;
}

if ($coffee) {
print (OUTPUT ",");
while (--$coffee_length) {
if($i==9) {
print(OUTPUT " $null,\n$tab");
$i = 0;
} else {
print (OUTPUT " $null,");
$i++;
}
}
print (OUTPUT " $null");
}
print (OUTPUT "};\n");
close(FILE);
push(@fvars, $fvar);
push(@pfiles, $file);
}}

if ($linkedlist) {
#-------------------httpd_fsdata_file links-------------------
#The non-coffee PROGMEM flash file system for the Raven webserver uses a linked flash list as follows:
print(OUTPUT "\n\n/* Structure of linked list (all offsets relative to start of section):\n");
print(OUTPUT "struct httpd_fsdata_file {\n");
print(OUTPUT "$tab const struct httpd_fsdata_file *next; //actual flash address of next link\n");
print(OUTPUT "$tab const char *name; //offset to coffee file name\n");
print(OUTPUT "$tab const char *data; //offset to coffee file data\n");
print(OUTPUT "$tab const int len; //length of file data\n");
print(OUTPUT "#if HTTPD_FS_STATISTICS == 1 //not enabled since list is in PROGMEM\n");
print(OUTPUT "$tab uint16_t count; //storage for file statistics\n");
print(OUTPUT "#endif\n");
print(OUTPUT "}\n*/\n");

# For the static httpd-fs.c file system the file name and data addresses in the linked list
# point to the actual memory locations.
# For the coffee file system the addresses start from zero. The starting address must be added
# in the coffee read routine.

for($i = 0; $i < @fvars; $i++) {
$file = $pfiles[$i];
$fvar = $fvars[$i];
if($i == 0) {
$prevfile = "NULL";
$data_offset=0;
} else {
$data_offset=$data_offset+$clen[$i-1];
$prevfile = "file" . $fvars[$i - 1];
}
$filename_offset=$data_offset+6+2*$coffee_page_t;
$coffee_offset=$data_offset+$coffee_header_length;
if ($coffee_offset>0xffff) {print "Warning : Linked list offset field overflow\n";}
print(OUTPUT "const struct httpd_fsdata_file");
for ($t=length($file);$t<16;$t++) {print(OUTPUT " ")};
print(OUTPUT " file".$fvar."[] ");
if ($attribute) {print(OUTPUT "$attribute ");}
print(OUTPUT "={{");
for ($t=length($prevfile);$t<20;$t++) {print(OUTPUT " ")};
print(OUTPUT "$prevfile, ");
if ($coffee) {
printf(OUTPUT "(const char *)0x%4.4x, ",$filename_offset);
printf(OUTPUT "(const char *)0x%4.4x, ",$coffee_offset);
printf(OUTPUT "%5u}};\n",$flen[$i]);
} else {
print(OUTPUT "data$fvar");
for ($t=length($file);$t<15;$t++) {print(OUTPUT " ")};
print(OUTPUT ", data$fvar");
for ($t=length($file);$t<15;$t++) {print(OUTPUT " ")};
print(OUTPUT " +".(length($file)+1).", sizeof(data$fvar)");
for ($t=length($file);$t<16;$t++) {print(OUTPUT " ")};
print(OUTPUT " -".(length($file)+1)."}};\n");
}
}
print(OUTPUT "\n#define HTTPD_FS_ROOT file$fvars[$n-1]\n");
print(OUTPUT "#define HTTPD_FS_NUMFILES $n\n");
print(OUTPUT "#define HTTPD_FS_SIZE $coffeesize\n");
}
print "All done, files occupy $coffeesize bytes\n";
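For reference, the linked-list output that this deleted script used to emit looks roughly like the sketch below. This is a hypothetical reconstruction assembled from the print(OUTPUT ...) statements above (non-coffee mode, no -A attribute, -l linked list); the file name "/index.html" and its five-byte body are invented for illustration.

#include <stddef.h>

/* Struct layout as documented in the script's comments above. */
struct httpd_fsdata_file {
  const struct httpd_fsdata_file *next; /* previously emitted entry, or NULL */
  const char *name;                     /* points at the name bytes in the data array */
  const char *data;                     /* file body, right after the zero-terminated name */
  const int len;                        /* body length in bytes */
};

/* Name bytes ("/index.html" plus a terminating 0x00) come first,
 * followed by the raw file contents ("hello" here). */
const char data_index_html[17] = {
  /* /index.html */
  0x2f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x2e, 0x68, 0x74, 0x6d, 0x6c, 0x00,
  0x68, 0x65, 0x6c, 0x6c, 0x6f,
};

const struct httpd_fsdata_file file_index_html[] =
  {{NULL, data_index_html, data_index_html + 12, sizeof(data_index_html) - 12}};

#define HTTPD_FS_ROOT     file_index_html
#define HTTPD_FS_NUMFILES 1
#define HTTPD_FS_SIZE     17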