Exporting SharePoint usage log files into a database using LogParser

臣服心动 2021-01-01 06:38

So basically we have lots of SharePoint usage log files generated by our SharePoint 2007 site and we would like to make sense of them. For that we're thinking of reading them with LogParser and exporting the data into a database.

5 Answers
  •  抹茶落季
    2021-01-01 07:10

    You'll have to write a custom input format plugin (a COM class) for LogParser. Here is what I did:

    // Requires a COM-interop reference to LogParser's type library
    // (LogParser.dll), which is where ILogParserInputContext is defined.
    using System;
    using System.Globalization;
    using System.IO;
    using System.Runtime.InteropServices;
    using System.Text;

    [Guid("1CC338B9-4F5F-4bf2-86AE-55C865CF7159")]
    [ComVisible(true)]
    public class SPUsageLogParserPlugin : ILogParserInputContext
    {
        private FileStream stream = null;
        private BinaryReader br = null;
        private object[] currentEntry = null;
    
        public SPUsageLogParserPlugin() { }
    
        #region LogParser
    
        // Fixed-size headers in the SharePoint 2007 binary usage log format:
        // one header at the start of the file, one before each entry.
        protected const int GENERAL_HEADER_LENGTH = 300;
        protected const int ENTRY_HEADER_LENGTH = 50;
        protected string[] columns = {"TimeStamp",
                                      "SiteGUID",
                                      "SiteUrl",
                                      "WebUrl",
                                      "Document",
                                      "User",
                                      "QueryString",
                                      "Referral",
                                      "UserAgent",
                                      "Command"};
    
        // Reads a null-terminated string, the string encoding used in the usage logs.
        protected string ReadString(BinaryReader br)
        {
            StringBuilder buffer = new StringBuilder();
            char c = br.ReadChar();
            while (c != 0) {
                buffer.Append(c);
                c = br.ReadChar();
            }
            return buffer.ToString();
        }
    
        #endregion
    
        #region ILogParserInputContext Members
    
        // Type codes LogParser expects back from GetFieldType().
        enum FieldType
        {
            Integer = 1,
            Real = 2,
            String = 3,
            Timestamp = 4
        }
    
        public void OpenInput(string from)
        {
            // "from" is whatever appears in the query's FROM clause.
            stream = File.OpenRead(from);
            br = new BinaryReader(stream);
            br.ReadBytes(GENERAL_HEADER_LENGTH); // skip the file header
        }
    
        public int GetFieldCount()
        {
            return columns.Length;
        }
    
        public string GetFieldName(int index)
        {
            return columns[index];
        }
    
        public int GetFieldType(int index)
        {
            if (index == 0) {
                // TimeStamp
                return (int)FieldType.Timestamp;
            } else {
                // Other fields
                return (int)FieldType.String;
            }
        }
    
        public bool ReadRecord()
        {
            if (stream.Position < stream.Length) {
                br.ReadBytes(ENTRY_HEADER_LENGTH); // skip the per-entry header

                string webappguid = ReadString(br);

                DateTime timestamp = DateTime.ParseExact(ReadString(br), "HH:mm:ss", CultureInfo.InvariantCulture);
                string siteUrl = ReadString(br);
                string webUrl = ReadString(br);
                string document = ReadString(br);
                string user = ReadString(br);
                string query = ReadString(br);
                string referral = ReadString(br);
                string userAgent = ReadString(br);
                string guid = ReadString(br); // consumed to keep the stream aligned; not emitted as a column
                string command = ReadString(br);
    
                currentEntry = new object[] { timestamp, webappguid, siteUrl, webUrl, document, user, query, referral, userAgent, command };
                return true;
            } else {
                currentEntry = new object[] { };
                return false;
            }
        }
    
        public object GetValue(int index)
        {
            return currentEntry[index];
        }
    
        public void CloseInput(bool abort)
        {
            // Nothing to roll back on abort; just release the file handles.
            if (br != null) br.Close();
            if (stream != null) stream.Dispose();
            stream = null;
            br = null;
        }
    
        #endregion
    }
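
    To load the parsed records into a database, register the compiled assembly for COM interop and point LogParser at the plugin via its COM input format. A minimal sketch, assuming the assembly is named SPUsageLogParserPlugin.dll, that the default ProgID is the bare class name (no [ProgId] attribute or namespace is set above), and that the server, database, table, and log path are placeholders you'd replace:

        rem Register the plugin so LogParser can instantiate it (use the matching 32/64-bit regasm)
        regasm /codebase SPUsageLogParserPlugin.dll

        rem Read a usage log through the plugin and bulk-load it into SQL Server
        LogParser -i:COM -iProgID:SPUsageLogParserPlugin ^
          "SELECT * INTO UsageLog FROM C:\logs\sample.log" ^
          -o:SQL -server:MYSERVER -database:UsageDB -driver:"SQL Server" -createTable:ON

    With -createTable:ON the SQL output format creates the UsageLog table on first run, taking column names from GetFieldName() and column types from GetFieldType().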
    
