DTSPipeline.dll — embedded REGISTRY (.rgs) resource registering the SSIS data flow COM objects:

HKCR
{
  SSIS.Pipeline.2 = s 'MainPipe Object'
  {
    CLSID = s '{E3CFBEA8-1F48-40D8-91E1-2DEDC1EDDD56}'
  }
  SSIS.Pipeline = s 'MainPipe Object'
  {
    CLSID = s '{E3CFBEA8-1F48-40D8-91E1-2DEDC1EDDD56}'
    CurVer = s 'SSIS.Pipeline.2'
  }
  NoRemove CLSID
  {
    ForceRemove {E3CFBEA8-1F48-40D8-91E1-2DEDC1EDDD56} = s 'Data Flow Task'
    {
      ProgID = s 'SSIS.Pipeline.2'
      VersionIndependentProgID = s 'SSIS.Pipeline'
      ForceRemove 'Programmable'
      InprocServer32 = s '%MODULE%'
      {
        val ThreadingModel = s 'free'
      }
      'TypeLib' = s '{6E944EA2-14A0-4997-98FA-3E1995D80BD9}'
      ForceRemove 'Implemented Categories'
      {
        ForceRemove {F3255910-E58B-4507-B39C-D2BF73B403FE}
      }
      ForceRemove DTSInfo
      {
        val Description = s 'Performs high-performance data extraction, transformation and loading'
        val TaskType = s 'DTS100'
        val TaskContact = s 'Performs high-performance data extraction, transformation and loading;Microsoft Corporation; Microsoft SQL Server v10; (C) 2007 Microsoft Corporation; All Rights Reserved;http://www.microsoft.com/sql/support/default.asp;1'
        val UITypeName = s 'Microsoft.DataTransformationServices.Design.PipelineTaskView, Microsoft.DataTransformationServices.Design, Version=%MANAGEDVERSION%, Culture=neutral, PublicKeyToken=89845dcd8080cc91'
        val ResourceFile = s 'DTSPipeline,108,109'
      }
      DefaultIcon = s '%MODULE%,0'
    }
  }
}
HKCR
{
  DTS.BufferManager.2 = s 'CDTSBufferManager Object'
  {
    CLSID = s '{87A4C985-0E84-432C-AC1A-B6F49A04ED4F}'
  }
  DTS.BufferManager = s 'CDTSBufferManager Object'
  {
    CLSID = s '{87A4C985-0E84-432C-AC1A-B6F49A04ED4F}'
    CurVer = s 'DTS.BufferManager.2'
  }
  NoRemove CLSID
  {
    ForceRemove {87A4C985-0E84-432C-AC1A-B6F49A04ED4F} = s 'CDTSBufferManager Object'
    {
      ProgID = s 'DTS.BufferManager.2'
      VersionIndependentProgID = s 'DTS.BufferManager'
      ForceRemove 'Programmable'
      InprocServer32 = s '%MODULE%'
      {
        val ThreadingModel = s 'free'
      }
      'TypeLib' = s '{6E944EA2-14A0-4997-98FA-3E1995D80BD9}'
    }
  }
}
HKCR
{
  DTS.ManagedComponentWrapper.2 = s 'Managed Component Host'
  {
    CLSID = s '{2E42D45B-F83C-400F-8D77-61DDE6A7DF29}'
  }
  DTS.ManagedComponentWrapper = s 'Managed Component Host'
  {
    CLSID = s '{2E42D45B-F83C-400F-8D77-61DDE6A7DF29}'
    CurVer = s 'DTS.ManagedComponentWrapper.2'
  }
  NoRemove CLSID
  {
    ForceRemove {2E42D45B-F83C-400F-8D77-61DDE6A7DF29} = s 'Managed Component Host'
    {
      ProgID = s 'DTS.ManagedComponentWrapper.2'
      VersionIndependentProgID = s 'DTS.ManagedComponentWrapper'
      ForceRemove 'Programmable'
      InprocServer32 = s '%MODULE%'
      {
        val ThreadingModel = s 'free'
      }
      'TypeLib' = s '{6E944EA2-14A0-4997-98FA-3E1995D80BD9}'
      ForceRemove 'DTSInfo'
      {
        val ComponentType = d 0
        val CurrentVersion = d 0
        ForceRemove 'EN-US'
        {
          val Description = s 'Managed Component Host'
        }
      }
    }
  }
}
HKCR
{
  DTS.ErrorComponent.2 = s 'Error Component'
  {
    CLSID = s '{AB3AF7A2-ABC6-4E7B-85AC-9D771049E9D5}'
  }
  DTS.ErrorComponent = s 'Error Component'
  {
    CLSID = s '{AB3AF7A2-ABC6-4E7B-85AC-9D771049E9D5}'
    CurVer = s 'DTS.ErrorComponent.2'
  }
  NoRemove CLSID
  {
    ForceRemove {AB3AF7A2-ABC6-4E7B-85AC-9D771049E9D5} = s 'Error Component'
    {
      ProgID = s 'DTS.ErrorComponent.2'
      VersionIndependentProgID = s 'DTS.ErrorComponent'
      ForceRemove 'Programmable'
      InprocServer32 = s '%MODULE%'
      {
        val ThreadingModel = s 'free'
      }
    }
  }
}
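The script above maps the version-independent ProgID SSIS.Pipeline to the Data Flow Task CLSID {E3CFBEA8-1F48-40D8-91E1-2DEDC1EDDD56} and registers the DLL as a free-threaded in-proc server. As a minimal sketch of what that registration enables (not code from the binary itself; the use of plain IUnknown is an assumption, since the pipeline's own interfaces are not part of this dump), a native COM client on a machine where the DLL is registered could resolve the ProgID and create the object like this:

    // Minimal C++ sketch: resolve "SSIS.Pipeline" and create the in-proc server.
    // Standard Win32 COM calls only; link against ole32.lib.
    #include <windows.h>
    #include <objbase.h>
    #include <cstdio>

    int main()
    {
        // ThreadingModel is registered as 'free', so a multithreaded apartment fits.
        HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
        if (FAILED(hr)) return 1;

        CLSID clsid{};
        // Resolves to {E3CFBEA8-1F48-40D8-91E1-2DEDC1EDDD56} per the script above.
        hr = CLSIDFromProgID(L"SSIS.Pipeline", &clsid);
        if (SUCCEEDED(hr))
        {
            IUnknown* pUnk = nullptr;
            hr = CoCreateInstance(clsid, nullptr, CLSCTX_INPROC_SERVER,
                                  IID_PPV_ARGS(&pUnk));
            if (SUCCEEDED(hr))
            {
                std::printf("MainPipe object created.\n");
                pUnk->Release();  // real clients would query the pipeline interfaces here
            }
        }
        CoUninitialize();
        return SUCCEEDED(hr) ? 0 : 1;
    }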
String table resources (product version 10.50.1600.1): display names, property descriptions, and log/event messages for the data flow engine and its stock components. The engine strings cover the Data Flow Task itself ("The Data Flow task encapsulates the data flow engine that moves data between sources and destinations providing the facility to transform, cleanse, and modify data as it is moved."), buffer-manager diagnostics (default buffer size and row counts, temporary buffer and BLOB storage locations, memory throttling, and leaked-buffer warnings), and execution-plan and phase logging (PrimeOutput, ProcessInput, validation, and per-phase timing). Component strings cover Aggregate, Lookup, Copy Column, Character Map, Unpivot, Sort, Slowly Changing Dimension, Pivot, Merge Join, Row Sampling, Percentage Sampling, Row Count, Multicast, Merge, Audit, Fuzzy Grouping, Import Column, Export Column, Derived Column, Data Conversion, Union All, Fuzzy Lookup, Term Lookup, Term Extraction, SQL Server Destination, Recordset Destination, Raw File Source and Destination, OLE DB Source and Destination, OLE DB Command, Excel Source and Destination, Flat File Source and Destination, Conditional Split, and the Cache Transform.

Authenticode signature: Microsoft code-signing certificate chain (Microsoft Code Signing PCA; signing statement "SQL Server 2008 R2", http://www.microsoft.com/sql) with a Microsoft Time-Stamp Service countersignature.