I recently posted a question and got an answer:
http://social.msdn.microsoft.com/Forums/en-US/sqlintegrationservices/thread/abd24259-e53f-40b2-af94-0dad06c909e9
My goal is to log the column metadata coming out of the OLE DB Source, as well as the column data itself, into a log file on a daily basis.
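For example, a single day's file should look roughly like this (the column names and values are just made-up placeholders; the delimiters match what the code below writes):

CustomerID - DT_I4 - 0|| FirstName - DT_WSTR - 50||
1 | John |
2 | Mary |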
I've used a Script Component (transformation); the code is below.
Here are the problems:
1. The column metadata does get written, but in the log file it is repeated many, many times.
2. I end up with two log files instead of one consolidated file. My goal is to put the column metadata and the column data into a single log file with a datetime stamp.
Any comments?
Thanks
/* Microsoft SQL Server Integration Services Script Component
 * Write scripts using Microsoft Visual C# 2008.
 * ScriptMain is the entry point class of the script. */
using System;
using System.Data;
using System.Text;
using System.IO;
using System.Globalization;
using System.Collections.Generic;
using Microsoft.SqlServer.Dts.Pipeline;
using Microsoft.SqlServer.Dts.Pipeline.Wrapper;
using Microsoft.SqlServer.Dts.Runtime;
using Microsoft.SqlServer.Dts.Runtime.Wrapper;
using System.Runtime.InteropServices;

[Microsoft.SqlServer.Dts.Pipeline.SSISScriptComponentEntryPointAttribute]
public class ScriptMain : UserComponent
{
    public string appName; // purpose of this package, HS, Feb.20/2013
    //public string log_file_name;
    public string log_path;
    public string log_file;

    public override void PreExecute() // this method logs the source column info: name, type and length
    {
        //log_file_na = "D:\\Test_" + DateTime.Now.ToString("yyyyMMdd") + ".log";
        log_path = @"c:\" + this.Variables.varapplicationName;
        log_file = log_path + "\\" + DateTime.Now.ToString("yyyyMMddhhmm") + ".log";

        if (!File.Exists(log_file)) // if the file path does not exist, create one. HS Feb.20/2013
        {
            Directory.CreateDirectory(log_path);
        }

        StreamWriter tw = File.AppendText(log_file);
        //base.PreExecute();
        IDTSInput100 input = ComponentMetaData.InputCollection[0];
        //tw.Write("Source Column Infos: ");
        tw.WriteLine();
        for (int x = 0; x < input.InputColumnCollection.Count; x++)
        {
            tw.Write(input.InputColumnCollection[x].Name + " - "
                   + input.InputColumnCollection[x].DataType + " - "
                   + input.InputColumnCollection[x].Length + "|| ");
        }
        tw.WriteLine();
        tw.Close();
        base.PreExecute();
    }

    public override void PostExecute()
    {
        base.PostExecute();
    }

    public override void Input0_ProcessInputRow(Input0Buffer Row)
    {
        /* Add your code here */
    }

    public override void ProcessInput(int InputID, PipelineBuffer Buffer) // this method writes each column value at run time for each buffer load -- HS, Feb.13/2013
    {
        if (!File.Exists(log_path)) // if the file path does not exist, create one. HS Feb.20/2013
        {
            Directory.CreateDirectory(log_path);
        }

        StreamWriter tw = File.AppendText(log_file);
        while (Buffer.NextRow())
        {
            for (int columnIndex = 0; columnIndex < Buffer.ColumnCount; columnIndex++)
            {
                tw.Write(Buffer[columnIndex] + " | ");
            }
            tw.WriteLine();
        }
        tw.Close();
        base.ProcessInput(InputID, Buffer);
    }
}
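To make the goal concrete, here is a rough, untested sketch of the behavior I'm after: the log path is built once with a date-only stamp (so there is one file per day), the metadata header is written once in PreExecute, and ProcessInput appends the data rows to that same file. varapplicationName is the same package variable used above; everything else is just illustrative.

using System;
using System.IO;
using Microsoft.SqlServer.Dts.Pipeline;
using Microsoft.SqlServer.Dts.Pipeline.Wrapper;

[Microsoft.SqlServer.Dts.Pipeline.SSISScriptComponentEntryPointAttribute]
public class ScriptMain : UserComponent
{
    private string log_file; // single log file shared by PreExecute and ProcessInput

    public override void PreExecute()
    {
        base.PreExecute();

        // Build the path once, with a date-only stamp, so every write in this
        // execution (and any later run on the same day) goes to one file.
        string log_path = @"c:\" + this.Variables.varapplicationName;
        Directory.CreateDirectory(log_path); // no-op if the folder already exists
        log_file = Path.Combine(log_path, DateTime.Now.ToString("yyyyMMdd") + ".log");

        // Write the column metadata header exactly once per execution.
        using (StreamWriter tw = File.AppendText(log_file))
        {
            IDTSInput100 input = ComponentMetaData.InputCollection[0];
            for (int x = 0; x < input.InputColumnCollection.Count; x++)
            {
                tw.Write(input.InputColumnCollection[x].Name + " - "
                       + input.InputColumnCollection[x].DataType + " - "
                       + input.InputColumnCollection[x].Length + "|| ");
            }
            tw.WriteLine();
        }
    }

    public override void ProcessInput(int InputID, PipelineBuffer Buffer)
    {
        // Append the data rows to the same file the header went to.
        using (StreamWriter tw = File.AppendText(log_file))
        {
            while (Buffer.NextRow())
            {
                for (int columnIndex = 0; columnIndex < Buffer.ColumnCount; columnIndex++)
                {
                    tw.Write(Buffer[columnIndex] + " | ");
                }
                tw.WriteLine();
            }
        }
        base.ProcessInput(InputID, Buffer);
    }
}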
-- Currently using Reporting Services 2000; Visual Studio .NET 2003; Visual SourceSafe; SSIS 2008; SSAS 2008; SVN --