############################################################################### # # common.eagle -- # # Written by Joe Mistachkin. # Released to the public domain, use at your own risk! # ############################################################################### # # NOTE: Use our own namespace here because even though we do not directly # support namespaces ourselves, we do not want to pollute the global # namespace if this script actually ends up being evaluated in Tcl. # namespace eval ::Eagle { if {[isEagle]} then { ########################################################################### ############################ BEGIN Eagle ONLY ############################# ########################################################################### proc getBuildYear {} { # # NOTE: See if the "year" setting has been overridden by the user (e.g. on # the command line). This helps control exactly which set of # binaries we are testing, those produced using either the Visual # Studio 2008 or Visual Studio 2010 build systems. To override this # value via the command line, enter a command similar to one of the # following (all on one line): # # EagleShell.exe -preInitialize "set test_year 2008" # -file .\path\to\all.eagle # # EagleShell.exe -preInitialize "set test_year 2010" # -file .\path\to\all.eagle # # EagleShell.exe -preInitialize "unset -nocomplain test_year" # -file .\path\to\all.eagle # if {[info exists ::test_year] && [string length $::test_year] > 0} then { # # NOTE: Use the specified test year. If this variable is not set, the # default value will be based on whether or not Eagle has been # compiled against the .NET Framework 2.0 or 4.0. # return $::test_year } else { # # NOTE: If Eagle has been compiled against the .NET Framework 4.0, use # "2010" as the test year; otherwise, use "2008". If another # major [incompatible] version of the .NET Framework is released, # this check will have to be changed. # return [expr {[haveConstraint imageRuntime40] ? "2010" : "2008"}] } } proc getBuildConfiguration {} { # # NOTE: See if the "configuration" setting has been overridden by the user # (e.g. on the command line). This helps control exactly which set # of binaries we are testing (i.e. those built in the "Debug" or # "Release" build configurations). To override this value via the # command line, enter a command similar to one of the following (all # on one line): # # EagleShell.exe -preInitialize "set test_configuration Debug" # -file .\path\to\all.eagle # # EagleShell.exe -preInitialize "set test_configuration Release" # -file .\path\to\all.eagle # # EagleShell.exe -file .\path\to\all.eagle -preTest # "unset -nocomplain test_configuration" # if {[info exists ::test_configuration] && \ [string length $::test_configuration] > 0} then { # # NOTE: Use the specified test configuration. The default value used # for this variable is "Release", as set by the test suite itself. # return $::test_configuration } else { # # NOTE: Normally, we will never hit this case because the value of the # test configuration variable is always set by the test suite # itself; however, it can be overridden using the unset command # from the -preTest option to the test suite. # return $::eagle_platform(configuration) } } proc getBuildDirectory {} { # # NOTE: See if the "native" runtime option has been added. If so, use the # directory for the mixed-mode assembly (a.k.a. the native interop # assembly). To enable this option via the command line, enter a # command similar to one of the following (all on one line): # # EagleShell.exe -initialize -runtimeOption native # -file .\path\to\all.eagle # # To enable this option via the command line prior to the "beta 16" # release of Eagle, the following command must be used instead # (also all on one line): # # EagleShell.exe -initialize -postInitialize # "object invoke Interpreter.GetActive AddRuntimeOption native" # -file .\path\to\all.eagle # if {[info exists ::build_directory] && \ [string length $::build_directory] > 0} then { # # NOTE: The location of the build directory has been overridden; # therefore, use it verbatim. # return $::build_directory } else { # # NOTE: Figure out the build base directory. This will be the directory # that contains the actual build output directory (e.g. "bin"). # if {[info exists ::build_base_directory] && \ [string length $::build_base_directory] > 0} then { # # NOTE: The location of the build base directory has been overridden; # therefore, use it verbatim. # set path $::build_base_directory } elseif {[info exists ::common_directory] && \ [string length $::common_directory] > 0} then { # # NOTE: Next, fallback to the parent directory of the one containing # this file (i.e. "common.eagle"), if available. # set path [file dirname $::common_directory] } else { # # NOTE: Finally, fallback to the parent directory of the EagleTest # path. The EagleTest package guarantees that this variable # will be set to the directory containing the first file to # execute the [runTestPrologue] script library procedure. # set path [file dirname $::path] } if {[hasRuntimeOption native]} then { return [file join $path bin [getBuildYear] \ [machineToPlatform $::tcl_platform(machine)] \ [getBuildConfiguration]] } else { return [file join $path bin [getBuildYear] \ [getBuildConfiguration] bin] } } } proc getBuildFileName { fileName } { return [file nativename \ [file join [getBuildDirectory] [file tail $fileName]]] } proc getBinaryDirectory {} { # # NOTE: This procedure returns the directory where the test application # itself (i.e. the Eagle shell) is located. This will be used as # the destination for the copied System.Data.SQLite native and # managed assemblies (i.e. because this is one of the few places # where the CLR will actually find and load them properly). # if {[info exists ::binary_directory] && \ [string length $::binary_directory] > 0} then { # # NOTE: The location of the binary directory has been overridden; # therefore, use it verbatim. # return $::binary_directory } else { return [info binary] } } proc getBinaryFileName { fileName } { return [file nativename \ [file join [getBinaryDirectory] [file tail $fileName]]] } proc getDatabaseDirectory {} { # # NOTE: This procedure returns the directory where the test databases # should be located. By default, this just uses the temporary # directory configured for this system. # if {[info exists ::database_directory] && \ [string length $::database_directory] > 0} then { # # NOTE: The location of the database directory has been overridden; # therefore, use it. # return [file normalize $::database_directory] } else { return [getTemporaryPath] } } proc getAppDomainPreamble { {prefix ""} {suffix ""} } { # # NOTE: This procedure returns a test setup script suitable for evaluation # by a test interpreter created in an isolated application domain. # The script being returned will be surrounded by the prefix and # suffix "script fragments" specified by the caller, if any. The # entire script being returned will be substituted via [subst], in # the context of the caller. This step is necessary so that some # limited context information, primarily related to the test build # directory, can be transferred to the interpreter in the isolated # application domain, making it able to successfully run tests that # require one or more of the files in the build directory. Callers # to this procedure should keep in mind that the test script being # returned cannot only rely on any script library procedures not # included in the EagleLibrary package (i.e. "init.eagle"). Also, # all variable references and all "nested" commands (i.e. those in # square brackets), unless they are specially quoted, will end up # being evaluated in the context of the calling interpreter and not # the test interpreter created in the isolated application domain. # return [uplevel 1 [list subst [appendArgs $prefix { if {[hasRuntimeOption native]} then { object invoke Interpreter.GetActive AddRuntimeOption native } set ::path {$::path} set ::test_year {[getBuildYear]} set ::test_configuration {[getBuildConfiguration]} } $suffix]]] } proc tryCopyBuildFile { fileName } { # # NOTE: If we cannot copy the assembly then it is probably already loaded. # set sourceFileName [getBuildFileName $fileName] if {![file exists $sourceFileName]} then { tputs $::test_channel [appendArgs \ "---- skipped copying build file \"" $sourceFileName \ "\", it does not exist\n"] return } set targetFileName [getBinaryFileName $fileName] if {[catch { file copy -force $sourceFileName $targetFileName}] == 0} then { tputs $::test_channel [appendArgs \ "---- copied build file from \"" $sourceFileName "\" to \"" \ $targetFileName \"\n] } else { tputs $::test_channel [appendArgs \ "---- failed to copy build file from \"" $sourceFileName \ "\" to \"" $targetFileName \"\n] } } proc tryDeleteBinaryFile { fileName } { set fileName [getBinaryFileName $fileName] if {![file exists $fileName]} then { tputs $::test_channel [appendArgs \ "---- skipped deleting binary file \"" $fileName \ "\", it does not exist\n"] return } if {[catch {file delete $fileName}] == 0} then { tputs $::test_channel [appendArgs \ "---- deleted binary file \"" $fileName \"\n] } else { tputs $::test_channel [appendArgs \ "---- failed to delete binary file \"" $fileName \"\n] } } proc tryCopyAssembly { fileName {pdb true} } { tryCopyBuildFile $fileName if {$pdb} then { tryCopyBuildFile [appendArgs [file rootname $fileName] .pdb] } } proc tryDeleteAssembly { fileName {pdb true} } { tryDeleteBinaryFile $fileName if {$pdb} then { tryDeleteBinaryFile [appendArgs [file rootname $fileName] .pdb] } } proc tryLoadAssembly { fileName } { set fileName [getBinaryFileName $fileName] if {[catch {set assembly \ [object load -loadtype File $fileName]}] == 0} then { # # NOTE: Now, add the necessary test constraint. # addConstraint [file rootname [file tail $fileName]] # # NOTE: Return the full path of the loaded file. # return $fileName } return "" } proc checkForSQLite { channel } { tputs $channel "---- checking for core SQLite library... " if {[catch {object invoke -flags +NonPublic System.Data.SQLite.SQLite3 \ SQLiteVersion} version] == 0} then { # # NOTE: Attempt to query the Fossil source identifier for the SQLite # core library. # if {[catch {object invoke -flags +NonPublic System.Data.SQLite.SQLite3 \ SQLiteSourceId} sourceId]} then { # # NOTE: We failed to query the Fossil source identifier. # set sourceId unknown } # # NOTE: Yes, the SQLite core library appears to be available. # addConstraint SQLite tputs $channel [appendArgs "yes (" $version " " $sourceId ")\n"] } else { tputs $channel no\n } } proc getDateTimeFormat {} { # # NOTE: This procedure simply returns the "default" DateTime format used # by the test suite. # if {[info exists ::datetime_format] && \ [string length $::datetime_format] > 0} then { # # NOTE: Return the manually overridden value for the DateTime format. # return $::datetime_format } else { # # NOTE: Return an ISO8601 DateTime format compatible with SQLite, # System.Data.SQLite, and suitable for round-tripping with the # DateTime class of the framework. If this value is changed, # various tests may fail. # return "yyyy-MM-dd HH:mm:ss.FFFFFFFK" } } proc enumerableToList { enumerable } { set result [list] if {[string length $enumerable] == 0 || $enumerable eq "null"} then { return $result } object foreach -alias item $enumerable { if {[string length $item] > 0} then { lappend result [$item ToString] } } return $result } proc compileCSharpWith { text resultsVarName errorsVarName fileNames args } { # # NOTE: Create the base command to evaluate and add the property settings # that are almost always needed by our unit tests (i.e. the System # and System.Data assembly references). # set command [list compileCSharp $text results errors \ ReferencedAssemblies.Add System.dll ReferencedAssemblies.Add \ System.Data.dll ReferencedAssemblies.Add System.Xml.dll] # # NOTE: Add all the provided file names as assembly references. # foreach fileName $fileNames { lappend command ReferencedAssemblies.Add [getBinaryFileName $fileName] } # # NOTE: Add the extra arguments, if any, to the command to evaluate. # eval lappend command $args # # NOTE: Alias the compiler local results and errors variables to the # variable names provided by our caller. # upvar 1 $resultsVarName results upvar 1 $errorsVarName errors # # NOTE: Evaluate the constructed [compileCSharp] command and return the # result. # eval $command } proc setupDb { fileName {mode ""} {dateTimeFormat ""} {dateTimeKind ""} {extra ""} {delete true} {varName db}} { # # NOTE: For now, all test databases used by the test suite are placed into # the temporary directory. Each database used by a test should be # cleaned up by that test using the "cleanupDb" procedure, below. # set fileName [file join [getDatabaseDirectory] [file tail $fileName]] # # NOTE: By default, delete any pre-existing database with the same file # name if it currently exists. # if {$delete && [file exists $fileName]} then { if {[catch {file delete $fileName} error]} then { # # NOTE: We somehow failed to delete the file, report why. # tputs $::test_channel [appendArgs \ "==== WARNING: failed to delete database file \"" $fileName \ "\" during setup, error: " \n\t $error \n] } } # # NOTE: Refer to the specified variable (e.g. "db") in the context of the # caller. The handle to the opened database will be stored there. # upvar 1 $varName db # # NOTE: Start building the connection string. The only required portion # of the connection string is the database file name itself. # set connection {Data Source=${fileName}} # # NOTE: If the caller specified a journal mode, add the necessary portion # of the connection string now. # if {[string length $mode] > 0} then { append connection {;Journal Mode=${mode}} } # # NOTE: If the caller specified a DateTime format, add the necessary # portion of the connection string now. # if {[string length $dateTimeFormat] > 0} then { append connection {;DateTimeFormat=${dateTimeFormat}} } # # NOTE: If the caller specified a DateTimeKind, add the necessary portion # of the connection string now. # if {[string length $dateTimeKind] > 0} then { append connection {;DateTimeKind=${dateTimeKind}} } # # NOTE: If the caller specified an extra payload to the connection string, # append it now. # if {[string length $extra] > 0} then { append connection \; $extra } # # NOTE: Open the database connection now, placing the opaque handle value # into the variable specified by the caller. # set db [sql open -type SQLite [subst $connection]] } proc cleanupDb { fileName {varName db} } { # # NOTE: Refer to the specified variable (e.g. "db") in the context of the # caller. The handle to the opened database is stored there. # upvar 1 $varName db # # NOTE: Close the connection to the database now. This should allow us to # delete the underlying database file. # if {[info exists db] && [catch {sql close $db} error]} then { # # NOTE: We somehow failed to close the database, report why. # tputs $::test_channel [appendArgs \ "==== WARNING: failed to close database \"" $db "\", error: " \ \n\t $error \n] } # # NOTE: Build the full path to the database file name. For now, all test # database files are stored in the temporary directory. # set fileName [file join [getDatabaseDirectory] [file tail $fileName]] if {[file exists $fileName]} then { # # NOTE: Attempt to delete the test database file now. # if {[set code [catch {file delete $fileName} error]]} then { # # NOTE: We somehow failed to delete the file, report why. # tputs $::test_channel [appendArgs \ "==== WARNING: failed to delete database file \"" $fileName \ "\" during cleanup, error: " \n\t $error \n] } } else { # # NOTE: The file does not exist, success! # set code 0 } return $code } proc reportSQLiteResources { channel {quiet false} {collect true} } { # # NOTE: Skip all output if we are running in "quiet" mode. # if {!$quiet} then { tputs $channel "---- current memory in use by SQLite... " } if {[catch {object invoke -flags +NonPublic \ System.Data.SQLite.UnsafeNativeMethods \ sqlite3_memory_used} memory] == 0} then { if {!$quiet} then { tputs $channel [appendArgs $memory " bytes\n"] } } else { # # NOTE: Maybe the SQLite native library is unavailable? # set memory unknown if {!$quiet} then { tputs $channel [appendArgs $memory \n] } } set result $memory; # NOTE: Return memory in-use to caller. if {!$quiet} then { tputs $channel "---- maximum memory in use by SQLite... " } if {[catch {object invoke -flags +NonPublic \ System.Data.SQLite.UnsafeNativeMethods \ sqlite3_memory_highwater 0} memory] == 0} then { if {!$quiet} then { tputs $channel [appendArgs $memory " bytes\n"] } } else { # # NOTE: Maybe the SQLite native library is unavailable? # set memory unknown if {!$quiet} then { tputs $channel [appendArgs $memory \n] } } if {$collect} then { if {[catch {object invoke GC GetTotalMemory true} error]} then { tputs $channel [appendArgs \ "==== WARNING: failed full garbage collection, error: " \ \n\t $error \n] } } if {!$quiet} then { tputs $channel "---- current memory in use by the CLR... " } if {[catch {getDictionaryValue [debug memory] gc} memory] == 0} then { if {[string is integer -strict $memory]} then { if {!$quiet} then { tputs $channel [appendArgs $memory " bytes\n"] } } else { set memory invalid if {!$quiet} then { tputs $channel [appendArgs $memory \n] } } } else { set memory unknown if {!$quiet} then { tputs $channel [appendArgs $memory \n] } } return $result } proc runSQLiteTestPrologue {} { # # NOTE: Skip running our custom prologue if the main one has been skipped. # if {![info exists ::no(prologue.eagle)]} then { # # NOTE: Skip all System.Data.SQLite related file handling (deleting, # copying, and loading) if we are so instructed. # if {![info exists ::no(sqliteFiles)]} then { # # NOTE: Skip trying to delete any files if we are so instructed. # if {![info exists ::no(deleteSqliteFiles)]} then { tryDeleteAssembly sqlite3.dll tryDeleteAssembly SQLite.Interop.dll tryDeleteAssembly System.Data.SQLite.dll tryDeleteAssembly System.Data.SQLite.Linq.dll } # # NOTE: Skip trying to copy any files if we are so instructed. # if {![info exists ::no(copySqliteFiles)]} then { tryCopyAssembly sqlite3.dll tryCopyAssembly SQLite.Interop.dll tryCopyAssembly System.Data.SQLite.dll tryCopyAssembly System.Data.SQLite.Linq.dll } # # NOTE: Skip trying to load any files if we are so instructed. # if {![info exists ::no(loadSqliteFiles)]} then { tryLoadAssembly System.Data.SQLite.dll tryLoadAssembly System.Data.SQLite.Linq.dll } } catch { tputs $::test_channel [appendArgs \ "---- file version of \"SQLite.Interop.dll\"... " \ [file version [getBinaryFileName SQLite.Interop.dll]] \n] } catch { tputs $::test_channel [appendArgs \ "---- file version of \"System.Data.SQLite.dll\"... " \ [file version [getBinaryFileName System.Data.SQLite.dll]] \n] } catch { tputs $::test_channel [appendArgs \ "---- file version of \"System.Data.SQLite.Linq.dll\"... " \ [file version [getBinaryFileName System.Data.SQLite.Linq.dll]] \n] } set assemblies [object invoke AppDomain.CurrentDomain GetAssemblies] object foreach assembly $assemblies { if {[string match \{System.Data.SQLite* $assembly]} then { tputs $::test_channel [appendArgs \ "---- found assembly: " $assembly \n] } } # # NOTE: Now, we need to know if the SQLite core library is available # (i.e. because the managed-only System.Data.SQLite assembly can # load without it; however, it cannot do anything useful without # it). If we are using the mixed-mode assembly and we already # found it (above), this should always succeed. # checkForSQLite $::test_channel # # NOTE: Report the resource usage prior to running any tests. # reportSQLiteResources $::test_channel } } proc runSQLiteTestEpilogue {} { # # NOTE: Skip running our custom epilogue if the main one has been skipped. # if {![info exists ::no(epilogue.eagle)]} then { # # NOTE: Also report the resource usage after running the tests. # reportSQLiteResources $::test_channel } } ########################################################################### ############################# END Eagle ONLY ############################## ########################################################################### } # # NOTE: Save the name of the directory containing this file. # set ::common_directory [file dirname [info script]] # # NOTE: Provide the System.Data.SQLite test package to the interpreter. # package provide System.Data.SQLite.Test 1.0 }