- Timestamp: 2018-03-02T20:10:49Z
- Branches: lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children: f1380b7
- Parents: 3061bc1
- git-author: Jiří Zárevúcky <zarevucky.jiri@…> (2018-02-28 17:38:31)
- git-committer: Jiří Zárevúcky <zarevucky.jiri@…> (2018-03-02 20:10:49)
- Location: tools
- Files: 19 edited
tools/autogen.py
Diff r3061bc1 → ra35b458: whitespace-only changes (the modified lines in every displayed hunk are blank lines). Hunks fall within the per-member probe generation (the emit_constant(...) lines emitted for struct members), the generated source template combining generate_includes()/generate_struct()/generate_probes(), the regex pair extraction, and run().
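For context, the probe generator records the size of each struct member's type by emitting one emit_constant(...) line per member, using the format string found in autogen.py at this revision. A small sketch with an invented struct and member name:

    # Invented struct/member description, for illustration only.
    struct = {'name': 'foo'}
    member = {'name': 'bar', 'type': 'uint32_t'}
    line = "\temit_constant(%s_%s_ITEM_SIZE, sizeof(%s));\n" % (
            struct['name'].upper(), member['name'].upper(), member['type'])
    assert line == "\temit_constant(FOO_BAR_ITEM_SIZE, sizeof(uint32_t));\n"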
tools/autotool.py
Whitespace-only changes; hunks fall within read_config(), print_error()/print_warning(), sandbox_enter()/sandbox_leave(), check_config()/check_common(), the get_target() platform and target tables, check_app()/check_app_alternatives(), check_clang()/check_gcc()/check_binutils(), check_python(), decode_value(), probe_compiler(), detect_sizes(), create_makefile()/create_header(), and the main configuration and probing flow.
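For context, the compiler probe parses integer values out of the generated assembly, which presumably covers the '$'/'#' immediate prefixes and hexadecimal literals that some assemblers emit. A runnable sketch of decode_value() as it appears in this file, with illustrative inputs:

    def decode_value(value):
        "Decode an integer literal, stripping '$'/'#' prefixes and handling hex"
        base = 10
        if value.startswith('$') or value.startswith('#'):
            value = value[1:]
        if value.startswith('0x'):
            value = value[2:]
            base = 16
        return int(value, base)

    assert decode_value("42") == 42
    assert decode_value("#8") == 8      # '#' immediate prefix stripped
    assert decode_value("$0x20") == 32  # '$' stripped, hexadecimal base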
tools/checkers/clang.py
Whitespace-only changes; hunks fall within the clang() jobfile runner and main().
tools/checkers/jobfile.py
Whitespace-only changes; hunks fall within parse_arg().
tools/checkers/stanse.py
Whitespace-only changes; hunks fall within the stanse() jobfile conversion and runner, and main().
tools/checkers/vcc.py
Whitespace-only changes; hunks fall within cygpath(), preprocess(), vcc(), and main().
tools/config.py
Whitespace-only changes; hunks fall within read_config(), check_condition()/check_inside(), parse_rules(), yes_no(), subchoice(), infer_verify_choices(), random_choices(), get_default_rule(), get_rule_option(), validate_rule_value(), preprocess_config(), create_output(), the profile selection helpers, and the interactive main loop.
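For context, the conditions attached to configuration rules are flat CNF or DNF expressions over configuration variables. A minimal runnable sketch of the CNF case, mirroring the logic of check_condition()/check_inside() (the variable names and values below are invented for illustration; '*' is treated as 'y' as in the original):

    import re

    def clause_holds(clause, config):
        # A clause holds if any of its '|'-joined VAR=VAL / VAR!=VAL tests holds.
        for test in clause.split('|'):
            name, op, val = re.match(r'^(.*?)(!?=)(.*)$', test).groups()
            cur = config.get(name, '')
            cur = 'y' if cur == '*' else cur
            if (op == '=' and cur == val) or (op == '!=' and cur != val):
                return True
        return False

    def cnf_holds(text, config):
        # Every '&'-joined clause (optionally parenthesized) must hold.
        return all(clause_holds(c.strip('()'), config) for c in text.split('&'))

    config = {'PLATFORM': 'ia32', 'CONFIG_SMP': 'y'}
    assert cnf_holds('(PLATFORM=ia32|PLATFORM=amd64)&CONFIG_SMP!=n', config)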
tools/dest_build.py
Whitespace-only changes; hunks fall within duplicate_tree() and main().
tools/ew.py
Whitespace-only changes; hunks fall within pc_options(), the hard-disk, audio and boot-image option helpers, the QEMU command-line assembly, fail(), and run().
tools/imgutil.py
Whitespace-only changes; hunks fall within align_up(), count_up(), num_of_trailing_bin_zeros(), get_bit()/set_bit(), ItemToPack, listdir_items(), and chunks().
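For reference, the two rounding helpers shared by the image-building tools behave as follows (copied here in simplified form; the example values are arbitrary):

    def align_up(size, alignment):
        "Return size rounded up to a multiple of alignment"
        if size % alignment == 0:
            return size
        return (size // alignment + 1) * alignment

    def count_up(size, alignment):
        "Return the number of alignment-sized units needed to fit size"
        if size % alignment == 0:
            return size // alignment
        return size // alignment + 1

    assert align_up(5000, 4096) == 8192
    assert count_up(5000, 4096) == 2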
tools/jobfile.py
Whitespace-only changes; hunks fall within the jobfile record composition and the re-exec of the compiler proper.
tools/mkarray.py
Whitespace-only changes; hunks fall within main(): source gathering, optional deflate compression, and emission of the generated .h, .s and _desc.c files into a zip archive.
tools/mkext2.py
r3061bc1 ra35b458 77 77 uint32_t rev_major /* Major revision level */ 78 78 padding[4] /* default reserved uid and gid */ 79 79 80 80 /* Following is for ext2 revision 1 only */ 81 81 uint32_t first_inode … … 130 130 def __init__(self, filename, block_groups, blocks_per_group, inodes_per_group, block_size, inode_size, reserved_inode_count): 131 131 "Initialize the filesystem writer" 132 132 133 133 outf = open(filename, "w+b") 134 134 # Set the correct size of the image, so that we can read arbitrary position … … 172 172 lpf_dir.add(self.root_inode.as_dirent('..')) 173 173 lpf_dir.finish() 174 174 175 175 def init_gdt(self): 176 176 "Initialize block group descriptor table" 177 177 178 178 self.superblock_positions = [] 179 179 self.gdt = [] … … 202 202 gde.directory_inode_count = 0 203 203 self.gdt.append(gde) 204 204 205 205 def mark_block_cb(self, block): 206 206 "Called after a block has been allocated" 207 207 208 208 self.gdt[block // self.blocks_per_group].free_block_count -= 1 209 209 210 210 def mark_inode_cb(self, index, directory=False): 211 211 "Called after an inode has been allocated" 212 212 213 213 index -= 1 214 214 gde = self.gdt[index // self.inodes_per_group] … … 216 216 if directory: 217 217 gde.directory_inode_count += 1 218 218 219 219 def seek_to_block(self, block, offset=0): 220 220 "Seek to offset bytes after the start of the given block" 221 221 222 222 if offset < 0 or offset > self.block_size: 223 223 raise Exception("Invalid in-block offset") 224 224 self.outf.seek(block * self.block_size + offset) 225 225 226 226 def seek_to_inode(self, index): 227 227 "Seek to the start of the inode structure for the inode number index" 228 228 229 229 index -= 1 230 230 if index < 0: … … 235 235 block = base_block + (offset // self.block_size) 236 236 self.seek_to_block(block, offset % self.block_size) 237 237 238 238 def subtree_add(self, inode, parent_inode, dirpath, is_root=False): 239 239 "Recursively add files to the filesystem" 240 240 241 241 dir_writer = DirWriter(inode) 242 242 dir_writer.add(inode.as_dirent('.')) 243 243 dir_writer.add(parent_inode.as_dirent('..')) 244 244 245 245 if is_root: 246 246 dir_writer.add(self.lost_plus_found.as_dirent('lost+found')) … … 255 255 child_inode = Inode(self, newidx, Inode.TYPE_DIR) 256 256 self.subtree_add(child_inode, inode, item.path) 257 257 258 258 dir_writer.add(child_inode.as_dirent(item.name)) 259 259 self.write_inode(child_inode) 260 260 261 261 dir_writer.finish() 262 262 263 263 def write_inode(self, inode): 264 264 "Write inode information into the inode table" 265 265 266 266 self.seek_to_inode(inode.index) 267 267 self.outf.write(inode.pack()) … … 269 269 def write_gdt(self): 270 270 "Write group descriptor table at the current file position" 271 271 272 272 for gde in self.gdt: 273 273 data = bytes(gde.pack()) 274 274 self.outf.write(data) 275 275 self.outf.seek(GDE_SIZE-len(data), os.SEEK_CUR) 276 276 277 277 def write_superblock(self, block_group): 278 278 "Write superblock at the current position" 279 279 280 280 sb = xstruct.create(STRUCT_SUPERBLOCK) 281 281 sb.total_inode_count = self.total_inode_count … … 312 312 sb.volume_name = 'HelenOS rdimage\0' 313 313 self.outf.write(bytes(sb.pack())) 314 314 315 315 def write_all_metadata(self): 316 316 "Write superblocks, block group tables, block and inode bitmaps" 317 317 318 318 bbpg = self.blocks_per_group // 8 319 319 bipg = self.inodes_per_group // 8 320 320 def window(arr, index, size): 321 321 return arr[index * size:(index + 1) * size] 322 322 323 323 for 
bg_index in xrange(len(self.gdt)): 324 324 sbpos = self.superblock_positions[bg_index] 325 325 sbblock = (sbpos + 1023) // self.block_size 326 326 gde = self.gdt[bg_index] 327 327 328 328 self.outf.seek(sbpos) 329 329 self.write_superblock(bg_index) 330 330 331 331 self.seek_to_block(sbblock+1) 332 332 self.write_gdt() 333 333 334 334 self.seek_to_block(gde.block_bitmap_block) 335 335 self.outf.write(window(self.block_allocator.bitmap, bg_index, bbpg)) 336 336 337 337 self.seek_to_block(gde.inode_bitmap_block) 338 338 self.outf.write(window(self.inode_allocator.bitmap, bg_index, bipg)) 339 339 340 340 def close(self): 341 341 "Write all remaining data to the filesystem and close the file" 342 342 343 343 self.write_inode(self.root_inode) 344 344 self.write_inode(self.lost_plus_found) … … 354 354 self.bitmap = array.array('B', [0] * (count // 8)) 355 355 self.mark_cb = None 356 356 357 357 def __contains__(self, item): 358 358 "Check if the item is already used" 359 359 360 360 bitidx = item - self.base 361 361 return get_bit(self.bitmap[bitidx // 8], bitidx % 8) 362 362 363 363 def alloc(self, **options): 364 364 "Allocate a new item" 365 365 366 366 while self.nextidx < self.count and (self.base + self.nextidx) in self: 367 367 self.nextidx += 1 … … 372 372 self.mark_used(item, **options) 373 373 return item 374 374 375 375 def mark_used(self, item, **options): 376 376 "Mark the specified item as used" 377 377 378 378 bitidx = item - self.base 379 379 if item in self: … … 384 384 if self.mark_cb: 385 385 self.mark_cb(item, **options) 386 386 387 387 def mark_used_all(self, items, **options): 388 388 "Mark all specified items as used" 389 389 390 390 for item in items: 391 391 self.mark_used(item, **options) … … 395 395 TYPE_DIR = 2 396 396 TYPE2MODE = {TYPE_FILE: 8, TYPE_DIR: 4} 397 397 398 398 def __init__(self, fs, index, typ): 399 399 self.fs = fs … … 406 406 self.type = typ 407 407 self.refcount = 0 408 408 409 409 def as_dirent(self, name): 410 410 "Return a DirEntry corresponding to this inode" 411 411 self.refcount += 1 412 412 return DirEntry(name, self.index, self.type) 413 413 414 414 def new_block(self, data=True): 415 415 "Get a new block index from allocator and count it here as belonging to the file" 416 416 417 417 block = self.fs.block_allocator.alloc() 418 418 self.blocks += 1 419 419 return block 420 420 421 421 def get_or_add_block(self, block): 422 422 "Get or add a real block to the file" 423 423 424 424 if block < 12: 425 425 return self.get_or_add_block_direct(block) 426 426 return self.get_or_add_block_indirect(block) 427 427 428 428 def get_or_add_block_direct(self, block): 429 429 "Get or add a real block to the file (direct blocks)" 430 430 431 431 if self.direct[block] == None: 432 432 self.direct[block] = self.new_block() 433 433 return self.direct[block] 434 434 435 435 def get_or_add_block_indirect(self, block): 436 436 "Get or add a real block to the file (indirect blocks)" 437 437 438 438 # Determine the indirection level for the desired block 439 439 level = None … … 444 444 445 445 assert level != None 446 446 447 447 # Compute offsets for the topmost level 448 448 block_offset_in_level = block - self.fs.indirect_limits[level-1]; … … 452 452 current_block.block_id = self.indirect[level-1] 453 453 offset_in_block = block_offset_in_level // self.fs.indirect_blocks_per_level[level-1] 454 454 455 455 # Navigate through other levels 456 456 while level > 0: 457 457 assert offset_in_block < self.fs.block_ids_per_block 458 458 459 459 level -= 1 460 460 461 461 
self.fs.seek_to_block(current_block.block_id, offset_in_block*4) 462 462 current_block.unpack(self.fs.outf.read(4)) 463 463 464 464 if current_block.block_id == 0: 465 465 # The block does not exist, so alloc one and write it there … … 467 467 current_block.block_id = self.new_block(data=(level==0)) 468 468 self.fs.outf.write(current_block.pack()) 469 469 470 470 # If we are on the last level, break here as 471 471 # there is no next level to visit 472 472 if level == 0: 473 473 break 474 474 475 475 # Visit the next level 476 476 block_offset_in_level %= self.fs.indirect_blocks_per_level[level]; … … 478 478 479 479 return current_block.block_id 480 480 481 481 def do_seek(self): 482 482 "Perform a seek to the position indicated by self.pos" 483 483 484 484 block = self.pos // self.fs.block_size 485 485 real_block = self.get_or_add_block(block) 486 486 offset = self.pos % self.fs.block_size 487 487 self.fs.seek_to_block(real_block, offset) 488 488 489 489 def write(self, data): 490 490 "Write a piece of data (arbitrarily long) as the contents of the inode" 491 491 492 492 data_pos = 0 493 493 while data_pos < len(data): … … 499 499 data_pos += bytes_to_write 500 500 self.size = max(self.pos, self.size) 501 501 502 502 def align_size_to_block(self): 503 503 "Align the size of the inode up to block size" 504 504 505 505 self.size = align_up(self.size, self.fs.block_size) 506 506 507 507 def align_pos(self, bytes): 508 508 "Align the current position up to bytes boundary" 509 509 510 510 self.pos = align_up(self.pos, bytes) 511 511 512 512 def set_pos(self, pos): 513 513 "Set the current position" 514 514 515 515 self.pos = pos 516 516 517 517 def pack(self): 518 518 "Pack the inode structure and return the result" 519 519 520 520 data = xstruct.create(STRUCT_INODE) 521 521 data.mode = (Inode.TYPE2MODE[self.type] << 12) … … 546 546 data.group_id_high = 0 547 547 return data.pack() 548 548 549 549 class DirEntry: 550 550 "Represents a linked list directory entry" 551 551 552 552 def __init__(self, name, inode, typ): 553 553 self.name = name.encode('UTF-8') … … 555 555 self.skip = None 556 556 self.type = typ 557 557 558 558 def size(self): 559 559 "Return size of the entry in bytes" 560 560 561 561 return align_up(8 + len(self.name)+1, 4) 562 562 563 563 def write(self, inode): 564 564 "Write the directory entry into the inode" 565 565 566 566 head = xstruct.create(STRUCT_DIR_ENTRY_HEAD) 567 567 head.inode = self.inode … … 575 575 class DirWriter: 576 576 "Manages writing directory entries into an inode (alignment, etc.)" 577 577 578 578 def __init__(self, inode): 579 579 self.pos = 0 … … 581 581 self.prev_entry = None 582 582 self.prev_pos = None 583 583 584 584 def prev_write(self): 585 585 "Write a previously remembered entry" 586 586 587 587 if self.prev_entry: 588 588 self.prev_entry.skip = self.pos - self.prev_pos … … 590 590 self.prev_entry.write(self.inode) 591 591 self.inode.set_pos(self.pos) 592 592 593 593 def add(self, entry): 594 594 "Add a directory entry to the directory" 595 595 596 596 size = entry.size() 597 597 block_size = self.inode.fs.block_size … … 602 602 self.prev_pos = self.pos 603 603 self.pos += size 604 604 605 605 def finish(self): 606 606 "Write the last entry and finish writing the directory contents" 607 607 608 608 if not self.inode: 609 609 return … … 614 614 def subtree_stats(root, block_size): 615 615 "Recursively calculate statistics" 616 616 617 617 blocks = 0 618 618 inodes = 1 619 619 dir_writer = DirWriter(None) 620 620 621 621 for item in 
listdir_items(root): 622 622 inodes += 1 … … 627 627 blocks += subtree_blocks 628 628 inodes += subtree_inodes 629 629 630 630 dir_writer.finish() 631 631 blocks += count_up(dir_writer.pos, block_size) … … 640 640 usage(sys.argv[0]) 641 641 return 642 642 643 643 if (not sys.argv[1].isdigit()): 644 644 print("<EXTRA_BYTES> must be a number") 645 645 return 646 646 647 647 extra_bytes = int(sys.argv[1]) 648 648 649 649 path = os.path.abspath(sys.argv[2]) 650 650 if (not os.path.isdir(path)): 651 651 print("<PATH> must be a directory") 652 652 return 653 653 654 654 block_size = 4096 655 655 inode_size = 128 … … 657 657 blocks_per_group = 1024 658 658 inodes_per_group = 512 659 659 660 660 blocks, inodes = subtree_stats(path, block_size) 661 661 blocks += count_up(extra_bytes, block_size) 662 662 inodes += reserved_inode_count 663 663 664 664 inodes_per_group = align_up(inodes_per_group, 8) 665 665 blocks_per_group = align_up(blocks_per_group, 8) 666 666 667 667 inode_table_blocks_per_group = (inodes_per_group * inode_size) // block_size 668 668 inode_bitmap_blocks_per_group = count_up(inodes_per_group // 8, block_size) … … 673 673 free_blocks_per_group -= block_bitmap_blocks_per_group 674 674 free_blocks_per_group -= 10 # one for SB and some reserve for GDT 675 675 676 676 block_groups = max(count_up(inodes, inodes_per_group), count_up(blocks, free_blocks_per_group)) 677 677 678 678 fs = Filesystem(sys.argv[3], block_groups, blocks_per_group, inodes_per_group, 679 679 block_size, inode_size, reserved_inode_count) 680 680 681 681 fs.subtree_add(fs.root_inode, fs.root_inode, path, is_root=True) 682 682 fs.close() 683 683 684 684 if __name__ == '__main__': 685 685 main() -
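
The Allocator class in the diff above keeps its used/free state in a packed bitmap indexed relative to a base value. The following standalone sketch (with trivial get_bit/set_bit helpers standing in for the tool's own, which are defined outside this excerpt) shows the same byte/bit bookkeeping; it is illustrative only and not part of the changeset.

import array

def get_bit(byte, bit):
    # True if the given bit (0..7) of the byte is set.
    return bool(byte & (1 << bit))

def set_bit(byte, bit):
    # Return the byte with the given bit (0..7) set.
    return byte | (1 << bit)

class BitmapAllocatorSketch:
    "Packed bitmap mirroring the Allocator's __contains__/mark_used indexing."

    def __init__(self, base, count):
        # count is assumed to be a multiple of 8, as in the tool above
        # (inodes_per_group and blocks_per_group are aligned up to 8).
        self.base = base
        self.count = count
        self.bitmap = array.array('B', [0] * (count // 8))

    def __contains__(self, item):
        bitidx = item - self.base
        return get_bit(self.bitmap[bitidx // 8], bitidx % 8)

    def mark_used(self, item):
        bitidx = item - self.base
        self.bitmap[bitidx // 8] = set_bit(self.bitmap[bitidx // 8], bitidx % 8)

# Example: mark item 10 as used in a group whose first item is 8.
alloc = BitmapAllocatorSketch(base=8, count=64)
alloc.mark_used(10)
print(10 in alloc, 11 in alloc)   # True False
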
tools/mkfat.py
r3061bc1 ra35b458 41 41 def subtree_size(root, cluster_size, dirent_size): 42 42 "Recursive directory walk and calculate size" 43 43 44 44 size = 0 45 45 files = 2 46 46 47 47 for item in listdir_items(root): 48 48 if item.is_file: … … 52 52 size += subtree_size(item.path, cluster_size, dirent_size) 53 53 files += 1 54 54 55 55 return size + align_up(files * dirent_size, cluster_size) 56 56 57 57 def root_entries(root): 58 58 "Return number of root directory entries" 59 59 60 60 return len(os.listdir(root)) 61 61 62 62 def write_file(item, outf, cluster_size, data_start, fat, reserved_clusters): 63 63 "Store the contents of a file" 64 64 65 65 prev = -1 66 66 first = 0 67 67 68 68 for data in chunks(item, cluster_size): 69 69 empty_cluster = fat.index(0) 70 70 fat[empty_cluster] = 0xffff 71 71 72 72 if (prev != -1): 73 73 fat[prev] = empty_cluster 74 74 else: 75 75 first = empty_cluster 76 76 77 77 prev = empty_cluster 78 78 79 79 outf.seek(data_start + (empty_cluster - reserved_clusters) * cluster_size) 80 80 outf.write(data) 81 81 82 82 return first, item.size 83 83 84 84 def write_directory(directory, outf, cluster_size, data_start, fat, reserved_clusters, dirent_size, empty_cluster): 85 85 "Store the contents of a directory" 86 86 87 87 length = len(directory) 88 88 size = length * dirent_size 89 89 prev = -1 90 90 first = 0 91 91 92 92 i = 0 93 93 rd = 0; … … 99 99 else: 100 100 first = empty_cluster 101 101 102 102 prev = empty_cluster 103 103 104 104 data = bytes() 105 105 data_len = 0 … … 107 107 if (i == 0): 108 108 directory[i].cluster = empty_cluster 109 109 110 110 data += directory[i].pack() 111 111 data_len += dirent_size 112 112 i += 1 113 113 114 114 outf.seek(data_start + (empty_cluster - reserved_clusters) * cluster_size) 115 115 outf.write(data) 116 116 rd += len(data) 117 117 118 118 return first, size 119 119 … … 188 188 def fat_lchars(name): 189 189 "Filter FAT legal characters" 190 190 191 191 filtered_name = b'' 192 192 filtered = False 193 193 194 194 for char in name.encode('ascii', 'replace').upper(): 195 195 if char in lchars: … … 198 198 filtered_name += b'_' 199 199 filtered = True 200 200 201 201 return (filtered_name, filtered) 202 202 203 203 def fat_name83(name, name83_list): 204 204 "Create a 8.3 name for the given name" 205 205 206 206 ascii_name, lfn = fat_lchars(name) 207 207 # Splitting works only on strings, not on bytes 208 208 ascii_parts = ascii_name.decode('utf8').split('.') 209 209 210 210 short_name = '' 211 211 short_ext = '' 212 212 213 213 if len(ascii_name) > 11: 214 214 lfn = True 215 215 216 216 if len(ascii_parts) > 0: 217 217 short_name = ascii_parts[0] 218 218 if len(short_name) > 8: 219 219 lfn = True 220 220 221 221 if len(ascii_parts) > 1: 222 222 short_ext = ascii_parts[-1] 223 223 if len(short_ext) > 3: 224 224 lfn = True 225 225 226 226 if len(ascii_parts) > 2: 227 227 lfn = True 228 228 229 229 if lfn == False: 230 230 name83_list.append(short_name + '.' + short_ext) 231 231 return (short_name.ljust(8)[0:8], short_ext.ljust(3)[0:3], False) 232 232 233 233 # For filenames with multiple extensions, we treat the last one 234 234 # as the actual extension. 
The rest of the filename is stripped … … 236 236 for part in ascii_parts[1:-1]: 237 237 short_name += part 238 238 239 239 for number in range(1, 999999): 240 240 number_str = ('~' + str(number)).upper() 241 241 242 242 if len(short_name) + len(number_str) > 8: 243 243 short_name = short_name[0:8 - len(number_str)] 244 244 245 245 short_name += number_str; 246 246 247 247 if not (short_name + '.' + short_ext) in name83_list: 248 248 break 249 249 250 250 name83_list.append(short_name + '.' + short_ext) 251 251 return (short_name.ljust(8)[0:8], short_ext.ljust(3)[0:3], True) … … 253 253 def create_lfn_dirent(name, seq, checksum): 254 254 "Create LFN directory entry" 255 255 256 256 entry = xstruct.create(LFN_DIR_ENTRY) 257 257 name_rest = name[26:] 258 258 259 259 if len(name_rest) > 0: 260 260 entry.seq = seq 261 261 else: 262 262 entry.seq = seq | 0x40 263 263 264 264 entry.name1 = name[0:10] 265 265 entry.name2 = name[10:22] 266 266 entry.name3 = name[22:26] 267 267 268 268 entry.attr = 0x0F 269 269 entry.rec_type = 0 270 270 entry.checksum = checksum 271 271 entry.cluster = 0 272 272 273 273 return (entry, name_rest) 274 274 275 275 def lfn_checksum(name): 276 276 "Calculate LFN checksum" 277 277 278 278 checksum = 0 279 279 for i in range(0, 11): 280 280 checksum = (((checksum & 1) << 7) + (checksum >> 1) + ord(name[i])) & 0xFF 281 281 282 282 return checksum 283 283 284 284 def create_dirent(name, name83_list, directory, cluster, size): 285 285 short_name, short_ext, lfn = fat_name83(name, name83_list) 286 286 287 287 dir_entry = xstruct.create(DIR_ENTRY) 288 288 289 289 dir_entry.name = short_name 290 290 dir_entry.ext = short_ext 291 291 292 292 if (directory): 293 293 dir_entry.attr = 0x30 294 294 else: 295 295 dir_entry.attr = 0x20 296 296 297 297 dir_entry.lcase = 0x18 298 298 dir_entry.ctime_fine = 0 # FIXME … … 303 303 dir_entry.mdate = 0 # FIXME 304 304 dir_entry.cluster = cluster 305 305 306 306 if (directory): 307 307 dir_entry.size = 0 308 308 else: 309 309 dir_entry.size = size 310 310 311 311 if not lfn: 312 312 return [dir_entry] 313 313 314 314 long_name = name.encode('utf_16_le') 315 315 entries = [dir_entry] 316 316 317 317 seq = 1 318 318 checksum = lfn_checksum(dir_entry.name + dir_entry.ext) 319 319 320 320 while len(long_name) > 0: 321 321 long_entry, long_name = create_lfn_dirent(long_name, seq, checksum) 322 322 entries.append(long_entry) 323 323 seq += 1 324 324 325 325 entries.reverse() 326 326 return entries … … 328 328 def create_dot_dirent(empty_cluster): 329 329 dir_entry = xstruct.create(DOT_DIR_ENTRY) 330 330 331 331 dir_entry.signature = 0x2e 332 332 dir_entry.name = b' ' 333 333 dir_entry.ext = b' ' 334 334 dir_entry.attr = 0x10 335 335 336 336 dir_entry.ctime_fine = 0 # FIXME 337 337 dir_entry.ctime = 0 # FIXME … … 342 342 dir_entry.cluster = empty_cluster 343 343 dir_entry.size = 0 344 344 345 345 return dir_entry 346 346 347 347 def create_dotdot_dirent(parent_cluster): 348 348 dir_entry = xstruct.create(DOTDOT_DIR_ENTRY) 349 349 350 350 dir_entry.signature = [0x2e, 0x2e] 351 351 dir_entry.name = b' ' 352 352 dir_entry.ext = b' ' 353 353 dir_entry.attr = 0x10 354 354 355 355 dir_entry.ctime_fine = 0 # FIXME 356 356 dir_entry.ctime = 0 # FIXME … … 361 361 dir_entry.cluster = parent_cluster 362 362 dir_entry.size = 0 363 363 364 364 return dir_entry 365 365 366 366 def recursion(head, root, outf, cluster_size, root_start, data_start, fat, reserved_clusters, dirent_size, parent_cluster): 367 367 "Recursive directory walk" 368 368 369 369 directory = [] 
370 370 name83_list = [] 371 371 372 372 if not head: 373 373 # Directory cluster preallocation 374 374 empty_cluster = fat.index(0) 375 375 fat[empty_cluster] = 0xFFFF 376 376 377 377 directory.append(create_dot_dirent(empty_cluster)) 378 378 directory.append(create_dotdot_dirent(parent_cluster)) 379 379 else: 380 380 empty_cluster = 0 381 381 382 382 for item in listdir_items(root): 383 383 if item.is_file: … … 387 387 rv = recursion(False, item.path, outf, cluster_size, root_start, data_start, fat, reserved_clusters, dirent_size, empty_cluster) 388 388 directory.extend(create_dirent(item.name, name83_list, True, rv[0], rv[1])) 389 389 390 390 if head: 391 391 outf.seek(root_start) … … 410 410 uint32_t hidden /* hidden sectors */ 411 411 uint32_t sectors_big /* total number of sectors (if sectors == 0) */ 412 412 413 413 /* Extended BIOS Parameter Block */ 414 414 uint8_t drive /* physical drive number */ … … 438 438 usage(sys.argv[0]) 439 439 return 440 440 441 441 if (not sys.argv[1].isdigit()): 442 442 print("<EXTRA_BYTES> must be a number") 443 443 return 444 444 445 445 extra_bytes = int(sys.argv[1]) 446 446 447 447 path = os.path.abspath(sys.argv[2]) 448 448 if (not os.path.isdir(path)): 449 449 print("<PATH> must be a directory") 450 450 return 451 451 452 452 fat16_clusters = 4096 453 453 454 454 sector_size = 512 455 455 cluster_size = 4096 … … 458 458 fat_count = 2 459 459 reserved_clusters = 2 460 460 461 461 # Make sure the filesystem is large enough for FAT16 462 462 size = subtree_size(path, cluster_size, dirent_size) + reserved_clusters * cluster_size + extra_bytes … … 467 467 else: 468 468 size = fat16_clusters * cluster_size + reserved_clusters * cluster_size 469 469 470 470 root_size = align_up(root_entries(path) * dirent_size, cluster_size) 471 471 472 472 fat_size = align_up(align_up(size, cluster_size) // cluster_size * fatent_size, sector_size) 473 473 474 474 sectors = (cluster_size + fat_count * fat_size + root_size + size) // sector_size 475 475 root_start = cluster_size + fat_count * fat_size 476 476 data_start = root_start + root_size 477 477 478 478 outf = open(sys.argv[3], "wb") 479 479 480 480 boot_sector = xstruct.create(BOOT_SECTOR) 481 481 boot_sector.jmp = [0xEB, 0x3C, 0x90] … … 499 499 else: 500 500 boot_sector.sectors_big = 0 501 501 502 502 boot_sector.drive = 0x80 503 503 boot_sector.extboot_signature = 0x29 … … 506 506 boot_sector.fstype = b'FAT16 ' 507 507 boot_sector.boot_signature = [0x55, 0xAA] 508 508 509 509 outf.write(boot_sector.pack()) 510 510 511 511 empty_sector = xstruct.create(EMPTY_SECTOR) 512 512 513 513 # Reserved sectors 514 514 for i in range(1, cluster_size // sector_size): 515 515 outf.write(empty_sector.pack()) 516 516 517 517 # FAT tables 518 518 for i in range(0, fat_count): 519 519 for j in range(0, fat_size // sector_size): 520 520 outf.write(empty_sector.pack()) 521 521 522 522 # Root directory 523 523 for i in range(0, root_size // sector_size): 524 524 outf.write(empty_sector.pack()) 525 525 526 526 # Data 527 527 for i in range(0, size // sector_size): 528 528 outf.write(empty_sector.pack()) 529 529 530 530 fat = array.array('L', [0] * (fat_size // fatent_size)) 531 531 fat[0] = 0xfff8 532 532 fat[1] = 0xffff 533 533 534 534 recursion(True, path, outf, cluster_size, root_start, data_start, fat, reserved_clusters, dirent_size, 0) 535 535 536 536 # Store FAT 537 537 fat_entry = xstruct.create(FAT_ENTRY) … … 541 541 fat_entry.next = fat[j] 542 542 outf.write(fat_entry.pack()) 543 543 544 544 outf.close() 545 545 -
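
The lfn_checksum() routine shown above implements the usual VFAT rotate-right-and-add checksum over the 11-character short name (8-character name plus 3-character extension). A self-contained illustration of the same arithmetic, under a hypothetical name so it does not shadow the tool's function:

def vfat_checksum(short_name):
    # short_name must be the 8.3 name padded to exactly 11 characters,
    # e.g. 'README  TXT' for README.TXT.
    assert len(short_name) == 11
    checksum = 0
    for ch in short_name:
        # Rotate the running value right by one bit, add the next byte, keep 8 bits.
        checksum = (((checksum & 1) << 7) + (checksum >> 1) + ord(ch)) & 0xFF
    return checksum

print(hex(vfat_checksum('README  TXT')))
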
tools/mktmpfs.py
r3061bc1 ra35b458 69 69 def recursion(root, outf): 70 70 "Recursive directory walk" 71 71 72 72 for item in listdir_items(root): 73 73 if item.is_file: … … 77 77 dentry.fname = item.name.encode('ascii') 78 78 dentry.flen = item.size 79 79 80 80 outf.write(dentry.pack()) 81 81 82 82 for data in chunks(item, 4096): 83 83 outf.write(data) 84 84 85 85 elif item.is_dir: 86 86 dentry = xstruct.create(DENTRY_DIRECTORY % len(item.name)) … … 88 88 dentry.fname_len = len(item.name) 89 89 dentry.fname = item.name.encode('ascii') 90 90 91 91 outf.write(dentry.pack()) 92 92 93 93 recursion(item.path, outf) 94 94 95 95 dentry = xstruct.create(DENTRY_NONE) 96 96 dentry.kind = TMPFS_NONE 97 97 dentry.fname_len = 0 98 98 99 99 outf.write(dentry.pack()) 100 100 … … 103 103 usage(sys.argv[0]) 104 104 return 105 105 106 106 path = os.path.abspath(sys.argv[1]) 107 107 if (not os.path.isdir(path)): 108 108 print("<PATH> must be a directory") 109 109 return 110 110 111 111 outf = open(sys.argv[2], "wb") 112 112 113 113 header = xstruct.create(HEADER) 114 114 header.tag = b"TMPFS" 115 115 116 116 outf.write(header.pack()) 117 117 118 118 recursion(path, outf) 119 119 120 120 dentry = xstruct.create(DENTRY_NONE) 121 121 dentry.kind = TMPFS_NONE 122 122 dentry.fname_len = 0 123 123 124 124 outf.write(dentry.pack()) 125 125 126 126 outf.close() 127 127 128 128 if __name__ == '__main__': 129 129 main() -
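
As read from the diff, mktmpfs.py serializes the tree as a flat stream: a 'TMPFS' header, then one dentry per item, with each directory's children inlined right after its dentry and closed by a NONE record, and the root directory closed the same way in main(). The conceptual sketch below yields tuples in that order instead of packed xstruct records; os.listdir stands in for the tool's listdir_items helper, which is not shown in this excerpt, and the file data chunks that follow each FILE dentry are omitted.

import os

def tmpfs_record_order(root):
    # Illustrative only: the logical record order, without binary packing.
    for name in os.listdir(root):
        path = os.path.join(root, name)
        if os.path.isfile(path):
            yield ('FILE', name, os.path.getsize(path))
        elif os.path.isdir(path):
            yield ('DIRECTORY', name)
            for record in tmpfs_record_order(path):
                yield record
            yield ('NONE',)          # closes this directory's listing

def tmpfs_image_order(root):
    yield ('HEADER', 'TMPFS')
    for record in tmpfs_record_order(root):
        yield record
    yield ('NONE',)                  # closes the root directory
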
tools/toolchain.sh
r3061bc1 ra35b458 85 85 HEADER="$2" 86 86 BODY="$3" 87 87 88 88 FNAME="/tmp/conftest-$$" 89 89 90 90 echo "#include ${HEADER}" > "${FNAME}.c" 91 91 echo >> "${FNAME}.c" … … 95 95 echo " return 0;" >> "${FNAME}.c" 96 96 echo "}" >> "${FNAME}.c" 97 97 98 98 cc $CFLAGS -c -o "${FNAME}.o" "${FNAME}.c" 2> "${FNAME}.log" 99 99 RC="$?" 100 100 101 101 if [ "$RC" -ne "0" ] ; then 102 102 if [ "${DEPENDENCY}" == "isl" ]; then 103 103 BUILD_ISL=true 104 104 105 105 echo " isl not found. Will be downloaded and built with GCC." 106 106 else … … 134 134 echo 135 135 echo "Script failed: $2" 136 136 137 137 exit 1 138 138 fi … … 142 142 FILE="$1" 143 143 SUM="$2" 144 144 145 145 COMPUTED="`md5sum "${FILE}" | cut -d' ' -f1`" 146 146 if [ "${SUM}" != "${COMPUTED}" ] ; then 147 147 echo 148 148 echo "Checksum of ${FILE} does not match." 149 149 150 150 exit 2 151 151 fi … … 193 193 echo "feature that is not fully supported." 194 194 echo 195 195 196 196 exit 3 197 197 } … … 203 203 show_countdown() { 204 204 TM="$1" 205 205 206 206 if [ "${TM}" -eq 0 ] ; then 207 207 echo 208 208 return 0 209 209 fi 210 210 211 211 echo -n "${TM} " 212 212 change_title "${TM}" 213 213 sleep 1 214 214 215 215 TM="`expr "${TM}" - 1`" 216 216 show_countdown "${TM}" … … 243 243 FILE="$2" 244 244 CHECKSUM="$3" 245 245 246 246 if [ ! -f "${FILE}" ] ; then 247 247 change_title "Downloading ${FILE}" 248 248 wget -c "${SOURCE}${FILE}" -O "${FILE}".part 249 249 check_error $? "Error downloading ${FILE}." 250 250 251 251 mv "${FILE}".part "${FILE}" 252 252 fi 253 253 254 254 check_md5 "${FILE}" "${CHECKSUM}" 255 255 } … … 257 257 source_check() { 258 258 FILE="$1" 259 259 260 260 if [ ! -f "${FILE}" ] ; then 261 261 echo 262 262 echo "File ${FILE} not found." 263 263 264 264 exit 4 265 265 fi … … 268 268 cleanup_dir() { 269 269 DIR="$1" 270 270 271 271 if [ -d "${DIR}" ] ; then 272 272 change_title "Removing ${DIR}" … … 279 279 DIR="$1" 280 280 DESC="$2" 281 281 282 282 change_title "Creating ${DESC}" 283 283 echo ">>> Creating ${DESC}" 284 284 285 285 mkdir -p "${DIR}" 286 286 test -d "${DIR}" … … 292 292 BASE="$2" 293 293 ORIGINAL="`pwd`" 294 294 295 295 mkdir -p "${OUTSIDE}" 296 296 297 297 cd "${OUTSIDE}" 298 298 check_error $? "Unable to change directory to ${OUTSIDE}." 299 299 ABS_OUTSIDE="`pwd`" 300 300 301 301 cd "${BASE}" 302 302 check_error $? "Unable to change directory to ${BASE}." 303 303 ABS_BASE="`pwd`" 304 304 305 305 cd "${ORIGINAL}" 306 306 check_error $? "Unable to change directory to ${ORIGINAL}." 307 307 308 308 BASE_LEN="${#ABS_BASE}" 309 309 OUTSIDE_TRIM="${ABS_OUTSIDE:0:${BASE_LEN}}" 310 310 311 311 if [ "${OUTSIDE_TRIM}" == "${ABS_BASE}" ] ; then 312 312 echo 313 313 echo "CROSS_PREFIX cannot reside within the working directory." 314 314 315 315 exit 5 316 316 fi … … 320 320 FILE="$1" 321 321 DESC="$2" 322 322 323 323 change_title "Unpacking ${DESC}" 324 324 echo " >>> Unpacking ${DESC}" 325 325 326 326 case "${FILE}" in 327 327 *.gz) … … 345 345 PATCH_STRIP="$2" 346 346 DESC="$3" 347 347 348 348 change_title "Patching ${DESC}" 349 349 echo " >>> Patching ${DESC} with ${PATCH_FILE}" 350 350 351 351 patch -t "-p${PATCH_STRIP}" <"$PATCH_FILE" 352 352 check_error $? "Error patching ${DESC}." 
… … 357 357 check_dependecies 358 358 show_countdown 10 359 359 360 360 BINUTILS_SOURCE="ftp://ftp.gnu.org/gnu/binutils/" 361 361 GCC_SOURCE="ftp://ftp.gnu.org/gnu/gcc/gcc-${GCC_VERSION}/" 362 362 GDB_SOURCE="ftp://ftp.gnu.org/gnu/gdb/" 363 363 ISL_SOURCE="http://isl.gforge.inria.fr/" 364 364 365 365 download_fetch "${BINUTILS_SOURCE}" "${BINUTILS}" "9e8340c96626b469a603c15c9d843727" 366 366 download_fetch "${GCC_SOURCE}" "${GCC}" "6bf56a2bca9dac9dbbf8e8d1036964a8" 367 367 download_fetch "${GDB_SOURCE}" "${GDB}" "06c8f40521ed65fe36ebc2be29b56942" 368 368 369 369 if $BUILD_ISL ; then 370 370 download_fetch "${ISL_SOURCE}" "${ISL}" "11436d6b205e516635b666090b94ab32" … … 426 426 build_target() { 427 427 PLATFORM="$1" 428 428 429 429 # This sets the *_TARGET variables 430 430 set_target_from_platform "$PLATFORM" … … 434 434 TARGET="$LINUX_TARGET" 435 435 fi 436 436 437 437 WORKDIR="${BASEDIR}/${TARGET}" 438 438 INSTALL_DIR="${WORKDIR}/PKG" … … 442 442 OBJDIR="${WORKDIR}/gcc-obj" 443 443 GDBDIR="${WORKDIR}/gdb-${GDB_VERSION}" 444 444 445 445 if [ -z "${CROSS_PREFIX}" ] ; then 446 446 CROSS_PREFIX="/usr/local/cross" 447 447 fi 448 448 449 449 PREFIX="${CROSS_PREFIX}/${TARGET}" 450 450 451 451 echo ">>> Downloading tarballs" 452 452 source_check "${BASEDIR}/${BINUTILS}" … … 456 456 source_check "${BASEDIR}/${ISL}" 457 457 fi 458 458 459 459 echo ">>> Removing previous content" 460 460 cleanup_dir "${WORKDIR}" 461 461 462 462 create_dir "${OBJDIR}" "GCC object directory" 463 463 464 464 check_dirs "${PREFIX}" "${WORKDIR}" 465 465 466 466 echo ">>> Unpacking tarballs" 467 467 cd "${WORKDIR}" 468 468 check_error $? "Change directory failed." 469 469 470 470 unpack_tarball "${BASEDIR}/${BINUTILS}" "binutils" 471 471 unpack_tarball "${BASEDIR}/${GCC}" "GCC" … … 475 475 mv "${ISLDIR}" "${GCCDIR}"/isl 476 476 fi 477 477 478 478 echo ">>> Applying patches" 479 479 for p in $BINUTILS_PATCHES ; do … … 486 486 patch_sources "${SRCDIR}/${p}" 0 "GDB" 487 487 done 488 488 489 489 echo ">>> Processing binutils (${PLATFORM})" 490 490 cd "${BINUTILSDIR}" 491 491 check_error $? "Change directory failed." 492 492 493 493 change_title "binutils: configure (${PLATFORM})" 494 494 CFLAGS=-Wno-error ./configure \ … … 498 498 --enable-deterministic-archives 499 499 check_error $? "Error configuring binutils." 500 500 501 501 change_title "binutils: make (${PLATFORM})" 502 502 make all 503 503 check_error $? "Error compiling binutils." 504 504 505 505 change_title "binutils: install (${PLATFORM})" 506 506 make install "DESTDIR=${INSTALL_DIR}" 507 507 check_error $? "Error installing binutils." 508 509 508 509 510 510 echo ">>> Processing GCC (${PLATFORM})" 511 511 cd "${OBJDIR}" 512 512 check_error $? "Change directory failed." 513 513 514 514 change_title "GCC: configure (${PLATFORM})" 515 515 PATH="$PATH:${INSTALL_DIR}/${PREFIX}/bin" "${GCCDIR}/configure" \ … … 521 521 --disable-shared --enable-lto --disable-werror 522 522 check_error $? "Error configuring GCC." 523 523 524 524 change_title "GCC: make (${PLATFORM})" 525 525 PATH="${PATH}:${PREFIX}/bin:${INSTALL_DIR}/${PREFIX}/bin" make all-gcc 526 526 check_error $? "Error compiling GCC." 527 527 528 528 change_title "GCC: install (${PLATFORM})" 529 529 PATH="${PATH}:${INSTALL_DIR}/${PREFIX}/bin" make install-gcc "DESTDIR=${INSTALL_DIR}" 530 530 check_error $? "Error installing GCC." 531 532 531 532 533 533 # No GDB support for RISC-V so far 534 534 if [ "$PLATFORM" != "riscv64" ] ; then … … 536 536 cd "${GDBDIR}" 537 537 check_error $? "Change directory failed." 
538 538 539 539 change_title "GDB: configure (${PLATFORM})" 540 540 PATH="$PATH:${INSTALL_DIR}/${PREFIX}/bin" ./configure \ … … 543 543 --enable-werror=no --without-guile 544 544 check_error $? "Error configuring GDB." 545 545 546 546 change_title "GDB: make (${PLATFORM})" 547 547 PATH="${PATH}:${PREFIX}/bin:${INSTALL_DIR}/${PREFIX}/bin" make all 548 548 check_error $? "Error compiling GDB." 549 549 550 550 change_title "GDB: make (${PLATFORM})" 551 551 PATH="${PATH}:${INSTALL_DIR}/${PREFIX}/bin" make install "DESTDIR=${INSTALL_DIR}" 552 552 check_error $? "Error installing GDB." 553 553 fi 554 554 555 555 # Symlink clang and lld to the install path. 556 556 CLANG="`which clang 2> /dev/null || echo "/usr/bin/clang"`" 557 557 LLD="`which ld.lld 2> /dev/null || echo "/usr/bin/ld.lld"`" 558 558 559 559 ln -s $CLANG "${INSTALL_DIR}/${PREFIX}/bin/${TARGET}-clang" 560 560 ln -s $LLD "${INSTALL_DIR}/${PREFIX}/bin/${TARGET}-ld.lld" 561 561 562 562 if $REAL_INSTALL ; then 563 563 echo ">>> Moving to the destination directory." … … 566 566 mv "${INSTALL_DIR}/${PREFIX}" "${PREFIX}" 567 567 fi 568 568 569 569 cd "${BASEDIR}" 570 570 check_error $? "Change directory failed." 571 571 572 572 echo ">>> Cleaning up" 573 573 cleanup_dir "${WORKDIR}" 574 574 575 575 echo 576 576 echo ">>> Cross-compiler for ${TARGET} installed." … … 647 647 build_target "arm32" & 648 648 wait 649 649 650 650 build_target "ia32" & 651 651 build_target "ia64" & 652 652 wait 653 653 654 654 build_target "mips32" & 655 655 build_target "mips32eb" & 656 656 wait 657 657 658 658 build_target "mips64" & 659 659 build_target "ppc32" & 660 660 wait 661 661 662 662 build_target "riscv64" & 663 663 build_target "ppc64" & 664 664 wait 665 665 666 666 build_target "sparc64" & 667 667 wait -
tools/travis.sh
r3061bc1 ra35b458 119 119 elif [ "$1" = "install" ]; then 120 120 set -x 121 121 122 122 # Install dependencies 123 123 sudo apt-get -qq update || exit 1 … … 133 133 elif [ "$1" = "run" ]; then 134 134 set -x 135 135 136 136 # Expected output filename (bootable image) 137 137 H_OUTPUT_FILENAME=`h_get_arch_config $H_ARCH_CONFIG_OUTPUT_FILENAME` … … 150 150 fi 151 151 152 152 153 153 # Build it 154 154 make "PROFILE=$H_ARCH" HANDS_OFF=y || exit 1 155 155 test -s "$H_OUTPUT_FILENAME" || exit 1 156 156 157 157 echo 158 158 echo "HelenOS for $H_ARCH built okay." … … 165 165 echo "Repository used is $H_HARBOURS_REPOSITORY." 166 166 echo 167 167 168 168 H_HELENOS_HOME=`pwd` 169 169 cd "$HOME" || exit 1 … … 181 181 echo "machine =" `echo "$H_ARCH" | cut -d/ -f 2` 182 182 ) >hsct.conf || exit 1 183 183 184 184 # "$HOME/helenos-harbours/hsct.sh" init "$H_HELENOS_HOME" "$H_ARCH" build >/dev/null 2>/dev/null || exit 1 185 185 186 186 "$HOME/helenos-harbours/hsct.sh" update || exit 1 187 187 … … 196 196 tail -n 100 "run-$HARBOUR.log" 197 197 fi 198 198 199 199 done 200 200 201 201 if [ -n "$FAILED_HARBOURS" ]; then 202 202 echo -
tools/xstruct.py
r3061bc1 ra35b458 72 72 def size(self): 73 73 return struct.calcsize(self._format_) 74 74 75 75 def pack(self): 76 76 args = [] … … 90 90 args.append(value) 91 91 return struct.pack(self._format_, *args) 92 92 93 93 def unpack(self, data): 94 94 values = struct.unpack(self._format_, data) … … 100 100 def create(definition): 101 101 "Create structure object" 102 102 103 103 tokens = definition.split(None) 104 104 105 105 # Initial byte order tag 106 106 format = { … … 111 111 inst = Struct() 112 112 args = [] 113 113 114 114 # Member tags 115 115 comment = False … … 120 120 comment = False 121 121 continue 122 122 123 123 if (token == "/*"): 124 124 comment = True 125 125 continue 126 126 127 127 if (variable != None): 128 128 subtokens = token.split("[") 129 129 130 130 length = None 131 131 if (len(subtokens) > 1): 132 132 length = int(subtokens[1].split("]")[0]) 133 133 format += "%d" % length 134 134 135 135 format += variable 136 136 137 137 inst.__dict__[subtokens[0]] = None 138 138 args.append((subtokens[0], variable, length)) 139 139 140 140 variable = None 141 141 continue 142 142 143 143 if (token[0:8] == "padding["): 144 144 size = token[8:].split("]")[0] 145 145 format += "%dx" % int(size) 146 146 continue 147 147 148 148 variable = { 149 149 "char": lambda: "s", … … 152 152 "uint32_t": lambda: "L", 153 153 "uint64_t": lambda: "Q", 154 154 155 155 "int8_t": lambda: "b", 156 156 "int16_t": lambda: "h", … … 158 158 "int64_t": lambda: "q" 159 159 }[token]() 160 160 161 161 inst.__dict__['_format_'] = format 162 162 inst.__dict__['_args_'] = args -
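
create() in xstruct.py walks the whitespace-separated definition and builds a struct-module format string from the type table shown above (char -> 's', uint8_t -> 'B', uint16_t -> 'H', uint32_t -> 'L', uint64_t -> 'Q', plus the signed counterparts), folding in array lengths and padding[] members. A hypothetical definition such as the one below therefore packs the same way as a hand-written format string; the '<' little-endian prefix is assumed here, since the byte-order tag table itself is elided in this excerpt.

import struct

# Hypothetical xstruct definition:
#     little:
#     char tag[5]
#     uint32_t size
#     padding[3]
#     uint8_t kind
#
# which the type table above would turn into the format string below.
fmt = '<5sL3xB'
packed = struct.pack(fmt, b'MAGIC', 4096, 7)
print(struct.calcsize(fmt), struct.unpack(fmt, packed))
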
tools/xtui.py
r3061bc1 ra35b458 35 35 def call_dlg(dlgcmd, *args, **kw): 36 36 "Wrapper for calling 'dialog' program" 37 37 38 38 indesc, outdesc = os.pipe() 39 39 pid = os.fork() … … 41 41 os.dup2(outdesc, 2) 42 42 os.close(indesc) 43 43 44 44 dlgargs = [dlgcmd] 45 45 for key, val in kw.items(): 46 46 dlgargs.append('--' + key) 47 47 dlgargs.append(val) 48 48 49 49 dlgargs += args 50 50 os.execlp(dlgcmd, *dlgargs) 51 51 52 52 os.close(outdesc) 53 53 54 54 try: 55 55 errout = os.fdopen(indesc, 'r') … … 61 61 os.system('reset') 62 62 raise 63 63 64 64 if (not os.WIFEXITED(status)): 65 65 # Reset terminal 66 66 os.system('reset') 67 67 raise EOFError 68 68 69 69 status = os.WEXITSTATUS(status) 70 70 if (status == 255): 71 71 raise EOFError 72 72 73 73 return (status, data) 74 74 … … 79 79 except ImportError: 80 80 newt = False 81 81 82 82 dlgcmd = os.environ.get('DIALOG', 'dialog') 83 83 if (call_dlg(dlgcmd, '--print-maxsize')[0] != 0): … … 91 91 def width_fix(screen, width): 92 92 "Correct width to screen size" 93 93 94 94 if (width + width_extra > screen.width): 95 95 width = screen.width - width_extra 96 96 97 97 if (width <= 0): 98 98 width = screen.width 99 99 100 100 return width 101 101 102 102 def height_fix(screen, height): 103 103 "Correct height to screen size" 104 104 105 105 if (height + height_extra > screen.height): 106 106 height = screen.height - height_extra 107 107 108 108 if (height <= 0): 109 109 height = screen.height 110 110 111 111 return height 112 112 113 113 def screen_init(): 114 114 "Initialize the screen" 115 115 116 116 if (newt): 117 117 return snack.SnackScreen() 118 118 119 119 return None 120 120 121 121 def screen_done(screen): 122 122 "Cleanup the screen" 123 123 124 124 if (newt): 125 125 screen.finish() … … 127 127 def choice_window(screen, title, text, options, position): 128 128 "Create options menu" 129 129 130 130 maxopt = 0 131 131 for option in options: … … 133 133 if (length > maxopt): 134 134 maxopt = length 135 135 136 136 width = maxopt 137 137 height = len(options) 138 138 139 139 if (newt): 140 140 width = width_fix(screen, width + width_extra) 141 141 height = height_fix(screen, height) 142 142 143 143 if (height > 3): 144 144 large = True 145 145 else: 146 146 large = False 147 147 148 148 buttonbar = snack.ButtonBar(screen, ('Done', 'Cancel')) 149 149 textbox = snack.TextboxReflowed(width, text) 150 150 listbox = snack.Listbox(height, scroll = large, returnExit = 1) 151 151 152 152 cnt = 0 153 153 for option in options: 154 154 listbox.append(option, cnt) 155 155 cnt += 1 156 156 157 157 if (position != None): 158 158 listbox.setCurrent(position) 159 159 160 160 grid = snack.GridForm(screen, title, 1, 3) 161 161 grid.add(textbox, 0, 0) 162 162 grid.add(listbox, 0, 1, padding = (0, 1, 0, 1)) 163 163 grid.add(buttonbar, 0, 2, growx = 1) 164 164 165 165 retval = grid.runOnce() 166 166 167 167 return (buttonbar.buttonPressed(retval), listbox.current()) 168 168 elif (dialog): 169 169 if (width < 35): 170 170 width = 35 171 171 172 172 args = [] 173 173 cnt = 0 … … 175 175 args.append(str(cnt + 1)) 176 176 args.append(option) 177 177 178 178 cnt += 1 179 179 180 180 kw = {} 181 181 if (position != None): 182 182 kw['default-item'] = str(position + 1) 183 183 184 184 status, data = call_dlg(dlgcmd, '--title', title, '--extra-button', '--extra-label', 'Done', '--menu', text, str(height + height_extra), str(width + width_extra), str(cnt), *args, **kw) 185 185 186 186 if (status == 1): 187 187 return ('cancel', None) 188 188 189 189 try: 190 190 choice = 
int(data) - 1 191 191 except ValueError: 192 192 return ('cancel', None) 193 193 194 194 if (status == 0): 195 195 return (None, choice) 196 196 197 197 return ('done', choice) 198 198 199 199 sys.stdout.write("\n *** %s *** \n%s\n\n" % (title, text)) 200 200 201 201 maxcnt = len(str(len(options))) 202 202 cnt = 0 … … 204 204 sys.stdout.write("%*s. %s\n" % (maxcnt, cnt + 1, option)) 205 205 cnt += 1 206 206 207 207 sys.stdout.write("\n%*s. Done\n" % (maxcnt, '0')) 208 208 sys.stdout.write("%*s. Quit\n\n" % (maxcnt, 'q')) 209 209 210 210 while True: 211 211 if (position != None): … … 217 217 sys.stdout.write("Selection[0]: ") 218 218 inp = sys.stdin.readline() 219 219 220 220 if (not inp): 221 221 raise EOFError 222 222 223 223 if (not inp.strip()): 224 224 if (position != None): … … 229 229 else: 230 230 inp = '0' 231 231 232 232 if (inp.strip() == 'q'): 233 233 return ('cancel', None) 234 234 235 235 try: 236 236 choice = int(inp.strip()) 237 237 except ValueError: 238 238 continue 239 239 240 240 if (choice == 0): 241 241 return ('done', 0) 242 242 243 243 if (choice < 1) or (choice > len(options)): 244 244 continue 245 245 246 246 return (None, choice - 1) 247 247 248 248 def error_dialog(screen, title, msg): 249 249 "Print error dialog" 250 250 251 251 width = len(msg) 252 252 253 253 if (newt): 254 254 width = width_fix(screen, width) 255 255 256 256 buttonbar = snack.ButtonBar(screen, ['Ok']) 257 257 textbox = snack.TextboxReflowed(width, msg) 258 258 259 259 grid = snack.GridForm(screen, title, 1, 2) 260 260 grid.add(textbox, 0, 0, padding = (0, 0, 0, 1))
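
choice_window() falls back to a plain-stdin menu when neither snack/newt nor dialog is usable. A simplified, self-contained sketch of that fallback is given below under the hypothetical name text_menu; the real function also handles a preselected position and the newt and dialog front ends.

import sys

def text_menu(title, text, options):
    # Return values mirror choice_window: (None, index) for a selection,
    # ('done', 0) for Done, ('cancel', None) for Quit.
    sys.stdout.write("\n *** %s *** \n%s\n\n" % (title, text))
    for idx, option in enumerate(options, start=1):
        sys.stdout.write("%2d. %s\n" % (idx, option))
    sys.stdout.write("\n 0. Done\n q. Quit\n\n")
    while True:
        sys.stdout.write("Selection[0]: ")
        sys.stdout.flush()
        inp = sys.stdin.readline()
        if not inp:
            raise EOFError
        inp = inp.strip()
        if inp == 'q':
            return ('cancel', None)
        if not inp:
            inp = '0'
        try:
            choice = int(inp)
        except ValueError:
            continue
        if choice == 0:
            return ('done', 0)
        if 1 <= choice <= len(options):
            return (None, choice - 1)
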