From b68950d30127fe7791bf4276122cd65c82dd3d1d Mon Sep 17 00:00:00 2001 From: KoalaBear Date: Sun, 2 Oct 2022 23:11:07 +0200 Subject: [PATCH] - First version with structured logging with Serilog - ToUpperInvariant, ToLowerInvariant --- .../GoogleDriveIndexer.cs | 51 +- ...OpenDirectoryDownloader.GoogleDrive.csproj | 2 +- .../AngleSharpExtensions.cs | 2 +- src/OpenDirectoryDownloader/BrowserContext.cs | 43 +- .../Calibre/CalibreParser.cs | 31 +- src/OpenDirectoryDownloader/Command.cs | 24 +- .../DirectoryParser.cs | 74 +- .../FileUpload/AnonFiles.cs | 11 +- .../FileUpload/GoFileIo.cs | 11 +- .../FileUpload/Pixeldrain.cs | 11 +- .../FileUpload/UploadFilesIo.cs | 7 +- .../FileUpload/ZippyShare.cs | 10 +- src/OpenDirectoryDownloader/FtpParser.cs | 49 +- .../Helpers/FileSizeHelper.cs | 9 +- .../Helpers/UrlHeaderInfoHelper.cs | 24 +- src/OpenDirectoryDownloader/Library.cs | 13 +- src/OpenDirectoryDownloader/NLog.config | 75 - src/OpenDirectoryDownloader/NLog.xsd | 3168 ----------------- .../OpenDirectoryDownloader.csproj | 10 +- .../OpenDirectoryIndexer.cs | 238 +- src/OpenDirectoryDownloader/Program.cs | 31 +- .../Properties/launchSettings.json | 2 +- .../Site/AmazonS3/AmazonS3Parser.cs | 10 +- .../BlitzfilesTech/BlitzfilesTechParser.cs | 16 +- .../Site/CrushFtp/CrushFtpParser.cs | 13 +- .../Site/Dropbox/DropboxParser.cs | 12 +- .../Site/GDIndex/BhadooIndexParser.cs | 24 +- .../Site/GDIndex/GdIndexParser.cs | 24 +- .../Site/GDIndex/Go2IndexParser.cs | 24 +- .../Site/GDIndex/GoIndexParser.cs | 25 +- .../Site/GDIndex/GoogleDriveIndexMapping.cs | 2 +- .../Site/GitHub/GitHubParser.cs | 24 +- .../Site/GoFileIO/GoFileIOParser.cs | 14 +- .../Site/Mediafire/MediafireParser.cs | 12 +- .../Site/Pixeldrain/PixeldrainParser.cs | 12 +- 35 files changed, 401 insertions(+), 3707 deletions(-) delete mode 100644 src/OpenDirectoryDownloader/NLog.config delete mode 100644 src/OpenDirectoryDownloader/NLog.xsd diff --git a/src/OpenDirectoryDownloader.GoogleDrive/GoogleDriveIndexer.cs b/src/OpenDirectoryDownloader.GoogleDrive/GoogleDriveIndexer.cs index d8f4925a..8e4212ec 100644 --- a/src/OpenDirectoryDownloader.GoogleDrive/GoogleDriveIndexer.cs +++ b/src/OpenDirectoryDownloader.GoogleDrive/GoogleDriveIndexer.cs @@ -3,9 +3,9 @@ using Google.Apis.Drive.v3; using Google.Apis.Services; using Google.Apis.Util.Store; -using NLog; using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; +using Serilog; using System; using System.IO; using System.Linq; @@ -14,39 +14,40 @@ namespace OpenDirectoryDownloader.GoogleDrive; -public static class GoogleDriveIndexer +public class GoogleDriveIndexer { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); - // If modifying these scopes, delete your previously saved credentials // at ~/.credentials/drive-dotnet-quickstart.json private static readonly string[] Scopes = { DriveService.Scope.DriveMetadataReadonly }; - private static readonly DriveService DriveService; + private static DriveService DriveService; private static readonly string ApplicationName = "OpenDirectoryDownloader"; private const string FolderMimeType = "application/vnd.google-apps.folder"; private const string ShortcutMimeType = "application/vnd.google-apps.shortcut"; private static readonly RateLimiter RateLimiter = new(900, TimeSpan.FromSeconds(100), 0.9d); - static GoogleDriveIndexer() + public ILogger Logger { get; } + + public GoogleDriveIndexer(ILogger logger) { + Logger = logger; + try { UserCredential credential; - using (FileStream fileStream = 
new("OpenDirectoryDownloader.GoogleDrive.json", FileMode.Open, FileAccess.Read)) - { - // The file token.json stores the user's access and refresh tokens, and is created - // automatically when the authorization flow completes for the first time. - string credPath = "token.json"; - credential = GoogleWebAuthorizationBroker.AuthorizeAsync( - GoogleClientSecrets.FromStream(fileStream).Secrets, - Scopes, - "user", - CancellationToken.None, - new FileDataStore(credPath, true)).Result; - - Console.WriteLine($"Credential file saved to: {credPath}"); - } + using FileStream fileStream = new("OpenDirectoryDownloader.GoogleDrive.json", FileMode.Open, FileAccess.Read); + + // The file token.json stores the user's access and refresh tokens, and is created + // automatically when the authorization flow completes for the first time. + string credPath = "token.json"; + credential = GoogleWebAuthorizationBroker.AuthorizeAsync( + GoogleClientSecrets.FromStream(fileStream).Secrets, + Scopes, + "user", + CancellationToken.None, + new FileDataStore(credPath, true)).Result; + + Console.WriteLine($"Credential file saved to: {credPath}"); // Create Drive API service. DriveService = new DriveService(new BaseClientService.Initializer() @@ -63,7 +64,7 @@ static GoogleDriveIndexer() } } - public static async Task IndexAsync(WebDirectory webDirectory, string resourceKey) + public async Task IndexAsync(WebDirectory webDirectory, string resourceKey) { webDirectory.StartTime = DateTimeOffset.UtcNow; string nextPageToken = string.Empty; @@ -149,7 +150,7 @@ public static async Task IndexAsync(WebDirectory webDirectory, str if (retries > 0) { - Logger.Warn($"Retrieval succesful after try {retries + 1} for {webDirectory.Url}"); + Logger.Warning("Retrieval succesful after try {retries} for {url}", retries + 1, webDirectory.Url); } if (string.IsNullOrWhiteSpace(nextPageToken)) @@ -171,19 +172,19 @@ public static async Task IndexAsync(WebDirectory webDirectory, str else { retries++; - Logger.Warn($"Google Drive error for {webDirectory.Url} on try {retries + 1}: {ex.Message}"); + Logger.Warning("Google Drive error for {url} on try {retries}: {error}", webDirectory.Url, retries + 1, ex.Message); } if (retries == maxRetries) { - Logger.Error($"Skip {webDirectory.Url} because of {maxRetries} consecutive errors on : {ex.Message}"); + Logger.Error("Skip {url} because of {maxRetries} consecutive errors on : {error}", webDirectory.Url, maxRetries, ex.Message); webDirectory.Error = true; return webDirectory; } } } - Logger.Debug($"Finished Google Drive Request for Folder {folderId}"); + Logger.Debug("Finished Google Drive Request for Folder {folderId}", folderId); return webDirectory; } diff --git a/src/OpenDirectoryDownloader.GoogleDrive/OpenDirectoryDownloader.GoogleDrive.csproj b/src/OpenDirectoryDownloader.GoogleDrive/OpenDirectoryDownloader.GoogleDrive.csproj index 9e0b3b9a..d341025c 100644 --- a/src/OpenDirectoryDownloader.GoogleDrive/OpenDirectoryDownloader.GoogleDrive.csproj +++ b/src/OpenDirectoryDownloader.GoogleDrive/OpenDirectoryDownloader.GoogleDrive.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/OpenDirectoryDownloader/AngleSharpExtensions.cs b/src/OpenDirectoryDownloader/AngleSharpExtensions.cs index 7bbdf0b7..9ed25beb 100644 --- a/src/OpenDirectoryDownloader/AngleSharpExtensions.cs +++ b/src/OpenDirectoryDownloader/AngleSharpExtensions.cs @@ -11,7 +11,7 @@ public static IElement Parent(this IElement element, string elementName) do { parentElement = parentElement.ParentElement; - } while (parentElement != null && 
parentElement.TagName.ToUpper() != elementName.ToUpper()); + } while (parentElement != null && parentElement.TagName.ToUpperInvariant() != elementName.ToUpperInvariant()); return parentElement; } diff --git a/src/OpenDirectoryDownloader/BrowserContext.cs b/src/OpenDirectoryDownloader/BrowserContext.cs index 941f1361..b0d4b261 100644 --- a/src/OpenDirectoryDownloader/BrowserContext.cs +++ b/src/OpenDirectoryDownloader/BrowserContext.cs @@ -1,5 +1,4 @@ -using NLog; -using PuppeteerExtraSharp; +using PuppeteerExtraSharp; using PuppeteerExtraSharp.Plugins.ExtraStealth; using PuppeteerSharp; using System; @@ -13,8 +12,6 @@ namespace OpenDirectoryDownloader; public class BrowserContext: IDisposable { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); - private const string SetCookieHeader = "set-cookie"; private const string CloudflareClearanceKey = "cf_clearance"; @@ -63,31 +60,31 @@ public async Task DoCloudFlareAsync(string url) Stopwatch stopwatch = Stopwatch.StartNew(); - Logger.Debug($"Navigating to {url}.."); + Program.Logger.Debug("Navigating to {url}..", url); await Page.GoToAsync(url); await Task.Delay(TimeSpan.FromSeconds(60), CancellationTokenSource.Token); - Logger.Debug($"Navigation done in {stopwatch.ElapsedMilliseconds}ms"); + Program.Logger.Debug("Navigation done in {elapsedMilliseconds}ms", stopwatch.ElapsedMilliseconds); - Logger.Debug("Finished with browser!"); + Program.Logger.Debug("Finished with browser!"); } catch (OperationCanceledException ex) { if (!OK) { - Logger.Error(ex, "Looks like Cloudflare protection wasn't solved in time."); + Program.Logger.Error(ex, "Looks like Cloudflare protection wasn't solved in time."); } } catch (Exception ex) { - Logger.Error(ex, "Error with browser"); + Program.Logger.Error(ex, "Error with browser"); } finally { - Logger.Debug("Closing browser"); + Program.Logger.Debug("Closing browser"); await Browser.CloseAsync(); - Logger.Debug("Closed browser"); + Program.Logger.Debug("Closed browser"); } return OK; @@ -101,12 +98,12 @@ public async Task InitializeAsync() if (!browserFetcher.LocalRevisions().Contains(BrowserFetcher.DefaultChromiumRevision)) { - Logger.Warn($"Downloading browser... First time it can take a while, depending on your internet connection."); + Program.Logger.Warning("Downloading browser... First time it can take a while, depending on your internet connection."); RevisionInfo revisionInfo = await browserFetcher.DownloadAsync(BrowserFetcher.DefaultChromiumRevision); - Logger.Warn($"Downloaded browser. Downloaded: {revisionInfo.Downloaded}, Platform: {revisionInfo.Platform}, Revision: {revisionInfo.Revision}, Path: {revisionInfo.FolderPath}"); + Program.Logger.Warning("Downloaded browser. 
Downloaded: {downloaded}, Platform: {platform}, Revision: {revision}, Path: {path}", revisionInfo.Downloaded, revisionInfo.Platform, revisionInfo.Revision, revisionInfo.FolderPath); } - Logger.Debug($"Creating browser..."); + Program.Logger.Debug("Creating browser..."); PuppeteerExtra puppeteerExtra = new(); @@ -121,7 +118,7 @@ public async Task InitializeAsync() IgnoreHTTPSErrors = true }); - Logger.Info($"Started browser with PID {Browser.Process.Id}"); + Program.Logger.Information("Started browser with PID {processId}", Browser.Process.Id); Browser.Closed += Browser_Closed; Browser.Disconnected += Browser_Disconnected; @@ -129,9 +126,9 @@ public async Task InitializeAsync() Browser.TargetCreated += Browser_TargetCreated; Browser.TargetDestroyed += Browser_TargetDestroyed; - Logger.Debug($"Created browser."); + Program.Logger.Debug("Created browser."); - Logger.Debug($"Creating page..."); + Program.Logger.Debug("Creating page..."); Page = (await Browser.PagesAsync())[0]; @@ -155,11 +152,11 @@ public async Task InitializeAsync() Page.WorkerCreated += Page_WorkerCreated; Page.WorkerDestroyed += Page_WorkerDestroyed; - Logger.Debug($"Created page."); + Program.Logger.Debug("Created page."); } catch (Exception ex) { - Logger.Error(ex, "Error with initializing browser"); + Program.Logger.Error(ex, "Error with initializing browser"); throw; } } @@ -179,7 +176,7 @@ public async Task GetHtml(string url) Stopwatch stopwatch = Stopwatch.StartNew(); - Logger.Debug($"Navigating to {url}.."); + Program.Logger.Debug("Navigating to {url}..", url); NavigationOptions navigationOptions = new() { @@ -188,7 +185,7 @@ public async Task GetHtml(string url) }; await Page.GoToAsync(url, navigationOptions); - Logger.Debug($"Navigation done in {stopwatch.ElapsedMilliseconds}ms"); + Program.Logger.Debug("Navigation done in {elapsedMilliseconds}ms", stopwatch.ElapsedMilliseconds); string html = await Page.GetContentAsync(); @@ -198,12 +195,12 @@ public async Task GetHtml(string url) { if (!OK) { - Logger.Error(ex, "Timeout in navigating to URL"); + Program.Logger.Error(ex, "Timeout in navigating to URL"); } } catch (Exception ex) { - Logger.Error(ex, "Error with browser"); + Program.Logger.Error(ex, "Error with browser"); throw; } diff --git a/src/OpenDirectoryDownloader/Calibre/CalibreParser.cs b/src/OpenDirectoryDownloader/Calibre/CalibreParser.cs index f9354f98..73d339a5 100644 --- a/src/OpenDirectoryDownloader/Calibre/CalibreParser.cs +++ b/src/OpenDirectoryDownloader/Calibre/CalibreParser.cs @@ -1,5 +1,4 @@ -using NLog; -using OpenDirectoryDownloader.Helpers; +using OpenDirectoryDownloader.Helpers; using OpenDirectoryDownloader.Shared.Models; using System; using System.Collections.Generic; @@ -13,8 +12,6 @@ namespace OpenDirectoryDownloader.Calibre; public static class CalibreParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); - public static Version ParseVersion(string versionString) { if (versionString.Contains('/')) @@ -39,7 +36,7 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri, try { Console.WriteLine("Retrieving libraries..."); - Logger.Info("Retrieving libraries..."); + Program.Logger.Information("Retrieving libraries..."); if (version.Major < 3) { @@ -55,12 +52,12 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri, CalibreUpdate.CalibreUpdate calibreUpdate = CalibreUpdate.CalibreUpdate.FromJson(updateResultJson); Console.WriteLine($"Retrieved {calibreUpdate.LibraryMap.Count} libraries"); - 
Logger.Info($"Retrieved {calibreUpdate.LibraryMap.Count} libraries"); + Program.Logger.Information($"Retrieved {calibreUpdate.LibraryMap.Count} libraries"); foreach (KeyValuePair library in calibreUpdate.LibraryMap) { Console.WriteLine($"Retrieving metadata of books for library {library.Value}..."); - Logger.Info($"Retrieving metadata of books for library {library.Value}..."); + Program.Logger.Information($"Retrieving metadata of books for library {library.Value}..."); WebDirectory libraryWebDirectory = new(webDirectory) { @@ -89,10 +86,10 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri, CalibreResult.CalibreResult libraryResult = CalibreResult.CalibreResult.FromJson(libraryResultJson); Console.WriteLine($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}"); - Logger.Info($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}"); + Program.Logger.Information($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}"); Console.WriteLine($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}..."); - Logger.Info($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}..."); + Program.Logger.Information($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}..."); int booksToIndex = libraryResult.Metadata.Count; int booksIndexed = 0; @@ -113,25 +110,25 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri, if (newBooksIndexed % 100 == 0 && stopwatch.Elapsed > TimeSpan.FromSeconds(5)) { - Logger.Warn($"Parsing books at {100 * ((decimal)newBooksIndexed / booksToIndex):F1}% ({newBooksIndexed}/{booksToIndex})"); + Program.Logger.Warning($"Parsing books at {100 * ((decimal)newBooksIndexed / booksToIndex):F1}% ({newBooksIndexed}/{booksToIndex})"); stopwatch.Restart(); } }); Console.WriteLine($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}"); - Logger.Info($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}"); + Program.Logger.Information($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}"); } } catch (Exception ex) { - Logger.Error(ex, "Error parsing Calibre"); + Program.Logger.Error(ex, "Error parsing Calibre"); webDirectory.Error = true; } } private static void GetBookInfo(HttpClient httpClient, Uri calibreRootUri, KeyValuePair library, WebDirectory libraryWebDirectory, KeyValuePair book) { - Logger.Debug($"Retrieving info for book [{book.Key}]: {book.Value.Title}..."); + Program.Logger.Debug($"Retrieving info for book [{book.Key}]: {book.Value.Title}..."); WebDirectory bookWebDirectory = new(libraryWebDirectory) { @@ -180,19 +177,19 @@ private static void GetBookInfo(HttpClient httpClient, Uri calibreRootUri, KeyVa { bookWebDirectory.Files.Add(new WebFile { - Url = new Uri(calibreRootUri, $"./get/{format.ToUpper()}/{book.Key}/{library.Key}").ToString(), - FileName = $"{PathHelper.GetValidPath(book.Value.Title)} - {PathHelper.GetValidPath(book.Value.AuthorSort)}.{format.ToLower()}", + Url = new Uri(calibreRootUri, $"./get/{format.ToUpperInvariant()}/{book.Key}/{library.Key}").ToString(), + FileName = $"{PathHelper.GetValidPath(book.Value.Title)} - {PathHelper.GetValidPath(book.Value.AuthorSort)}.{format.ToLowerInvariant()}", FileSize = book.Value.FormatSizes.ContainsKey(format) ? 
book.Value.FormatSizes[format] : 0 }); } libraryWebDirectory.Subdirectories.Add(bookWebDirectory); - Logger.Debug($"Retrieved info for book [{book.Key}]: {book.Value.Title}"); + Program.Logger.Debug($"Retrieved info for book [{book.Key}]: {book.Value.Title}"); } catch (Exception ex) { - Logger.Debug(ex, $"Error processing book {book.Key}"); + Program.Logger.Debug(ex, $"Error processing book {book.Key}"); bookWebDirectory.Error = true; } } diff --git a/src/OpenDirectoryDownloader/Command.cs b/src/OpenDirectoryDownloader/Command.cs index 822c0d2f..8793ce99 100644 --- a/src/OpenDirectoryDownloader/Command.cs +++ b/src/OpenDirectoryDownloader/Command.cs @@ -1,5 +1,4 @@ -using NLog; -using OpenDirectoryDownloader.Shared.Models; +using OpenDirectoryDownloader.Shared.Models; using System; using System.Collections.Generic; using System.Diagnostics; @@ -15,7 +14,6 @@ namespace OpenDirectoryDownloader; public class Command { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private static readonly string VersionNumber = Assembly.GetExecutingAssembly().GetName().Version.ToString(); /// @@ -52,7 +50,7 @@ internal static void ProcessConsoleInput(OpenDirectoryIndexer openDirectoryIndex { string message = "Console input is redirect, maybe it is run inside another host. This could mean that no input will be send/processed."; Console.WriteLine(message); - Logger.Warn(message); + Program.Logger.Warning(message); } while (true) @@ -139,7 +137,7 @@ internal static void ProcessConsoleInput(OpenDirectoryIndexer openDirectoryIndex } catch (Exception ex) { - Logger.Error($"Error copying stats to clipboard: {ex.Message}"); + Program.Logger.Error("Error copying stats to clipboard: {error}", ex.Message); } KillApplication(); @@ -164,7 +162,7 @@ internal static void ProcessConsoleInput(OpenDirectoryIndexer openDirectoryIndex } catch (Exception ex) { - Logger.Error(ex, "Error processing action"); + Program.Logger.Error(ex, "Error processing action"); throw; } } @@ -176,17 +174,17 @@ private static void SaveSession(OpenDirectoryIndexer openDirectoryIndexer) { string jsonPath = Library.GetOutputFullPath(OpenDirectoryIndexer.Session, openDirectoryIndexer.OpenDirectoryIndexerSettings, "json"); - Logger.Info("Saving session to JSON.."); + Program.Logger.Information("Saving session to JSON.."); Console.WriteLine("Saving session to JSON.."); Library.SaveSessionJson(OpenDirectoryIndexer.Session, jsonPath); - Logger.Info($"Saved session to JSON: {jsonPath}"); + Program.Logger.Information("Saved session to JSON: {path}", jsonPath); Console.WriteLine($"Saved session to JSON: {jsonPath}"); } catch (Exception ex) { - Logger.Error(ex); + Program.Logger.Error(ex, "Error saving session to JSON"); } } @@ -194,26 +192,26 @@ private static void SaveUrls(OpenDirectoryIndexer openDirectoryIndexer) { try { - Logger.Info("Saving URL list to file.."); + Program.Logger.Information("Saving URL list to file.."); Console.WriteLine("Saving URL list to file.."); IEnumerable distinctUrls = OpenDirectoryIndexer.Session.Root.AllFileUrls.Distinct().Select(i => WebUtility.UrlDecode(i)); string urlsPath = Library.GetOutputFullPath(OpenDirectoryIndexer.Session, openDirectoryIndexer.OpenDirectoryIndexerSettings, "txt"); File.WriteAllLines(urlsPath, distinctUrls); - Logger.Info($"Saved URL list to file: {urlsPath}"); + Program.Logger.Information("Saved URL list to file: {path}", urlsPath); Console.WriteLine($"Saved URL list to file: {urlsPath}"); } catch (Exception ex) { - Logger.Error(ex); + Program.Logger.Error(ex, "Error 
saving URLs to file"); } } public static void KillApplication() { Console.WriteLine("Exiting..."); - Logger.Info("Exiting..."); + Program.Logger.Information("Exiting..."); Environment.Exit(1); } diff --git a/src/OpenDirectoryDownloader/DirectoryParser.cs b/src/OpenDirectoryDownloader/DirectoryParser.cs index 7a2efb33..429ca3fa 100644 --- a/src/OpenDirectoryDownloader/DirectoryParser.cs +++ b/src/OpenDirectoryDownloader/DirectoryParser.cs @@ -4,7 +4,6 @@ using Esprima; using Esprima.Ast; using Newtonsoft.Json; -using NLog; using OpenDirectoryDownloader.Helpers; using OpenDirectoryDownloader.Models; using OpenDirectoryDownloader.Shared; @@ -36,7 +35,6 @@ namespace OpenDirectoryDownloader; public static class DirectoryParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private static readonly HtmlParser HtmlParser = new(); /// @@ -107,15 +105,15 @@ public static async Task ParseHtml(WebDirectory webDirectory, stri { googleDriveIndexType = GoogleDriveIndexMapping.GetGoogleDriveIndexType(script.Source); - if (googleDriveIndexType is null && script.Source.ToLower().Contains("app.min.js")) + if (googleDriveIndexType is null && script.Source.ToLowerInvariant().Contains("app.min.js")) { - Logger.Warn($"Checking/downloading javascript for sourcemaps: {script.Source}"); + Program.Logger.Warning("Checking/downloading javascript for sourcemaps: {scriptUrl}", script.Source); string sourceMapUrl = await Library.GetSourceMapUrlFromJavaScriptAsync(httpClient, script.Source); if (!string.IsNullOrWhiteSpace(sourceMapUrl)) { string fullSourceMapUrl = new Uri(new Uri(script.Source), sourceMapUrl).ToString(); - Logger.Warn($"Checking/downloading sourcemap for known Google Drive index: {fullSourceMapUrl}"); + Program.Logger.Warning("Checking/downloading sourcemap for known Google Drive index: {sourceMapUrl}", fullSourceMapUrl); IAsyncEnumerable sources = Library.GetSourcesFromSourceMapAsync(httpClient, fullSourceMapUrl); @@ -131,7 +129,7 @@ public static async Task ParseHtml(WebDirectory webDirectory, stri } } - if (googleDriveIndexType is null && script.Source.ToLower().Contains("app.js")) + if (googleDriveIndexType is null && script.Source.ToLowerInvariant().Contains("app.js")) { string scriptUrl = script.Source; @@ -166,7 +164,7 @@ public static async Task ParseHtml(WebDirectory webDirectory, stri if (OpenDirectoryIndexer.Session.MaxThreads != 1) { - Logger.Warn($"Reduce threads to 1 because of Google Drive index"); + Program.Logger.Warning("Reduce threads to 1 because of Google Drive index"); OpenDirectoryIndexer.Session.MaxThreads = 1; } } @@ -352,27 +350,27 @@ public static async Task ParseHtml(WebDirectory webDirectory, stri if (parsedWebDirectory.Subdirectories.Count == 0 && parsedWebDirectory.Files.Count == 0 && htmlDocument.QuerySelector("noscript") != null) { - Logger.Warn("No directories and files found, but did find a public static class BlitzfilesTechParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private static readonly Regex DriveHashRegex = new(@"\/drive\/s\/(?.*)"); private const string Parser = "BlitzfilesTech"; private static readonly RateLimiter RateLimiter = new(1, TimeSpan.FromSeconds(1)); @@ -27,10 +25,10 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (!OpenDirectoryIndexer.Session.Parameters.ContainsKey(Constants.Parameters_Password)) { Console.WriteLine($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); - 
Logger.Info($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); + Program.Logger.Information("{parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors.", Parser); Console.WriteLine("Check if password is needed (unsupported currently)..."); - Logger.Info("Check if password is needed (unsupported currently)..."); + Program.Logger.Information("Check if password is needed (unsupported currently)..."); OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password] = string.Empty; HttpResponseMessage httpResponseMessage = await httpClient.GetAsync(GetFolderUrl(driveHash, string.Empty, 0)); @@ -51,7 +49,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -81,7 +79,7 @@ private static string GetDriveHash(WebDirectory webDirectory) private static async Task ScanAsync(HttpClient httpClient, WebDirectory webDirectory) { - Logger.Debug($"Retrieving listings for {webDirectory.Uri} with password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); + Program.Logger.Debug("Retrieving listings for '{url}' with password: {password}", webDirectory.Uri, OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); webDirectory.Parser = Parser; @@ -96,7 +94,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire do { - Logger.Warn($"Retrieving listings for {webDirectory.Uri} with password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}, page {pageIndex + 1}"); + Program.Logger.Warning("Retrieving listings for '{url}' with password: {password}, page {pageIndex + 1}", webDirectory.Uri, OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); HttpResponseMessage httpResponseMessage = await httpClient.GetAsync(GetFolderUrl(driveHash, entryHash, pageIndex)); @@ -136,7 +134,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error processing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/CrushFtp/CrushFtpParser.cs b/src/OpenDirectoryDownloader/Site/CrushFtp/CrushFtpParser.cs index 0fc6a1e6..92f1c78b 100644 --- a/src/OpenDirectoryDownloader/Site/CrushFtp/CrushFtpParser.cs +++ b/src/OpenDirectoryDownloader/Site/CrushFtp/CrushFtpParser.cs @@ -1,5 +1,4 @@ -using NLog; -using OpenDirectoryDownloader.Shared; +using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; using System; using System.Collections.Generic; @@ -12,10 +11,8 @@ namespace OpenDirectoryDownloader.Site.CrushFtp; public static class CrushFtpParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string Parser = "CrushFTP"; private static string Authentication; - //private static string Username; private static string FunctionUrl; private static readonly Random Random = new(); private static bool WarningShown = false; @@ -27,7 +24,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire { WarningShown = true; - 
Logger.Warn($"CrushFTP scanning is limited to {RateLimiter.MaxRequestsPerTimeSpan} directories per {RateLimiter.TimeSpan.TotalSeconds:F1} second(s)!"); + Program.Logger.Warning("{parser} scanning is limited to {maxRequestsPerTimeSpan} directories per {seconds:F1} second(s)!", Parser, RateLimiter.MaxRequestsPerTimeSpan, RateLimiter.TimeSpan.TotalSeconds); } try @@ -42,7 +39,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for URL: {url}", Parser, webDirectory.Url); webDirectory.Error = true; throw; @@ -53,7 +50,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire private static async Task ScanAsync(HttpClient httpClient, WebDirectory webDirectory) { - Logger.Debug($"Retrieving listings for {webDirectory.Uri}"); + Program.Logger.Debug("Retrieving listings for '{url}'", webDirectory.Uri); webDirectory.Parser = Parser; @@ -102,7 +99,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error processing {parser} for URL: {url}", Parser, webDirectory.Url); webDirectory.Error = true; throw; diff --git a/src/OpenDirectoryDownloader/Site/Dropbox/DropboxParser.cs b/src/OpenDirectoryDownloader/Site/Dropbox/DropboxParser.cs index f3eeef58..98b0af9b 100644 --- a/src/OpenDirectoryDownloader/Site/Dropbox/DropboxParser.cs +++ b/src/OpenDirectoryDownloader/Site/Dropbox/DropboxParser.cs @@ -1,6 +1,5 @@ using Esprima; using Esprima.Ast; -using NLog; using OpenDirectoryDownloader.Shared.Models; using System; using System.Collections.Generic; @@ -14,7 +13,6 @@ namespace OpenDirectoryDownloader.Site.Dropbox; public static class DropboxParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private static readonly Regex UrlRegex = new(@"\/sh\/(?[^\/]*)\/(?[^\/?]*)(?:\/(?[^?]*))?"); private static readonly Regex PrefetchListingRegex = new(@"window\[""__REGISTER_SHARED_LINK_FOLDER_PRELOAD_HANDLER""\]\.responseReceived\((?"".*)\)\s?}\);"); private const string Parser = "Dropbox"; @@ -28,7 +26,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -46,7 +44,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire private static async Task ScanAsync(HttpClient httpClient, WebDirectory webDirectory, string html, HttpResponseMessage httpResponseMessage) { - Logger.Debug($"Retrieving listings for {webDirectory.Uri}"); + Program.Logger.Debug("Retrieving listings for '{url}'", webDirectory.Uri); webDirectory.Parser = Parser; @@ -130,7 +128,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire if (takedownActive) { - Logger.Warn("Some entries are not provided because of DCMA/takedown."); + Program.Logger.Warning("Some entries are not provided because of DCMA/takedown."); } } else @@ -140,7 +138,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error processing {parser} for '{url}'", Parser, 
webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -173,7 +171,7 @@ private static void GetCsrfToken(WebDirectory webDirectory, HttpResponseMessage if (cookie is not null) { - Logger.Warn($"CSRF Token found on {webDirectory.Uri}"); + Program.Logger.Warning($"CSRF Token found on {webDirectory.Uri}"); OpenDirectoryIndexer.Session.Parameters[Parameters_CSRFToken] = cookie.Value; } } diff --git a/src/OpenDirectoryDownloader/Site/GDIndex/BhadooIndexParser.cs b/src/OpenDirectoryDownloader/Site/GDIndex/BhadooIndexParser.cs index 419c822f..f3051050 100644 --- a/src/OpenDirectoryDownloader/Site/GDIndex/BhadooIndexParser.cs +++ b/src/OpenDirectoryDownloader/Site/GDIndex/BhadooIndexParser.cs @@ -3,7 +3,6 @@ using Esprima.Ast; using Jint; using Jint.Native; -using NLog; using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; using System; @@ -20,7 +19,6 @@ namespace OpenDirectoryDownloader.Site.GDIndex.Bhadoo; /// public static class BhadooIndexParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string FolderMimeType = "application/vnd.google-apps.folder"; private const string Parser = "BhadooIndex"; private static readonly RateLimiter RateLimiter = new(1, TimeSpan.FromSeconds(1)); @@ -37,10 +35,10 @@ public static async Task ParseIndex(IHtmlDocument htmlDocument, Ht if (!OpenDirectoryIndexer.Session.Parameters.ContainsKey(Constants.Parameters_Password)) { Console.WriteLine($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); - Logger.Info($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); + Program.Logger.Information("{parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors.", Parser); Console.WriteLine("Check if password is needed (unsupported currently)..."); - Logger.Info("Check if password is needed (unsupported currently)..."); + Program.Logger.Information("Check if password is needed (unsupported currently)..."); OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password] = string.Empty; Dictionary postValues = new() @@ -62,9 +60,8 @@ public static async Task ParseIndex(IHtmlDocument htmlDocument, Ht if (response.Error != null) { - string errorMessage = $"Error {response.Error.Code}, '{response.Error.Message}' retrieving for URL: {webDirectory.Url}"; - Logger.Error(errorMessage); - throw new Exception(errorMessage); + Program.Logger.Error("Error {errorCode}, '{errorMessage}' retrieving for '{url}'", response.Error.Code, response.Error.Message, webDirectory.Url); + throw new Exception($"Error {response.Error.Code}, '{response.Error.Message}' retrieving for URL: {webDirectory.Url}"); } webDirectory = await ScanAsync(htmlDocument, httpClient, webDirectory); @@ -77,7 +74,7 @@ public static async Task ParseIndex(IHtmlDocument htmlDocument, Ht } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -174,7 +171,7 @@ private static async Task ScanAsync(IHtmlDocument htmlDocument, Ht { Polly.Retry.AsyncRetryPolicy asyncRetryPolicy = Library.GetAsyncRetryPolicy((ex, waitTimeSpan, retry, pollyContext) => { - Logger.Warn($"Error retrieving directory listing for {webDirectory.Uri}, waiting 
{waitTimeSpan.TotalSeconds} seconds.. Error: {ex.Message}"); + Program.Logger.Warning("Error retrieving directory listing for '{url}', waiting {waitTime:F0} seconds.. Error: {error}", webDirectory.Uri, waitTimeSpan.TotalSeconds, ex.Message); RateLimiter.AddDelay(waitTimeSpan); }, 8); @@ -192,7 +189,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => { await RateLimiter.RateLimit(); - Logger.Warn($"Retrieving listings for {webDirectory.Uri.PathAndQuery}, page {pageIndex + 1}{(!string.IsNullOrWhiteSpace(OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]) ? $" with password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}" : string.Empty)}"); + Program.Logger.Warning("Retrieving listings for {relativeUrl}, page {page} with password: {password}", webDirectory.Uri.PathAndQuery, pageIndex + 1, OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); Dictionary postValues = new() { @@ -222,9 +219,8 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => else if (indexResponse.Error != null) { webDirectory.Error = true; - string errorMessage = $"Error {indexResponse.Error.Code}, '{indexResponse.Error.Message}' retrieving for URL: {webDirectory.Url}"; - Logger.Error(errorMessage); - throw new Exception(errorMessage); + Program.Logger.Error("Error {errorCode}, '{errorMessage}' retrieving for '{url}'", indexResponse.Error.Code, indexResponse.Error.Message, webDirectory.Url); + throw new Exception($"Error {indexResponse.Error.Code}, '{indexResponse.Error.Message}' retrieving for URL: {webDirectory.Url}"); } else { @@ -271,7 +267,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => } catch (Exception ex) { - Logger.Error(ex, $"Error retrieving directory listing for {webDirectory.Url}"); + Program.Logger.Error(ex, "Error retrieving directory listing for {url}", webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/GDIndex/GdIndexParser.cs b/src/OpenDirectoryDownloader/Site/GDIndex/GdIndexParser.cs index 472c4229..93742c86 100644 --- a/src/OpenDirectoryDownloader/Site/GDIndex/GdIndexParser.cs +++ b/src/OpenDirectoryDownloader/Site/GDIndex/GdIndexParser.cs @@ -1,5 +1,4 @@ using Newtonsoft.Json; -using NLog; using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; using System; @@ -12,7 +11,6 @@ namespace OpenDirectoryDownloader.Site.GDIndex.GdIndex; public static class GdIndexParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string FolderMimeType = "application/vnd.google-apps.folder"; private static readonly Regex RootIdRegex = new(@"default_root_id: '(?.*?)'"); private const string Parser = "GdIndex"; @@ -39,10 +37,10 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (!OpenDirectoryIndexer.Session.Parameters.ContainsKey(Constants.Parameters_Password)) { Console.WriteLine($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); - Logger.Info($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); + Program.Logger.Information("{parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors.", Parser); Console.WriteLine("Check if password is needed..."); - Logger.Info("Check if password is needed..."); + Program.Logger.Information("Check if password is needed..."); 
OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password] = null; HttpResponseMessage httpResponseMessage = await httpClient.PostAsync($"{webDirectory.Uri}?rootId={rootId}", null); @@ -57,12 +55,12 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse == null) { Console.WriteLine("Directory is password protected, please enter password:"); - Logger.Info("Directory is password protected, please enter password."); + Program.Logger.Information("Directory is password protected, please enter password."); OpenDirectoryIndexer.Session.Parameters["GoIndex_Password"] = Console.ReadLine(); Console.WriteLine($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); - Logger.Info($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); + Program.Logger.Information("Using password: {password}", OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); httpResponseMessage = await httpClient.PostAsync($"{webDirectory.Uri}?rootId={rootId}", new StringContent(JsonConvert.SerializeObject(new Dictionary { @@ -83,15 +81,15 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse != null) { Console.WriteLine("Password OK!"); - Logger.Info("Password OK!"); + Program.Logger.Information("Password OK!"); webDirectory = await ScanIndexAsync(httpClient, webDirectory); } else { OpenDirectoryIndexer.Session.Parameters.Remove(Constants.Parameters_Password); - Console.WriteLine($"Error. Stopping."); - Logger.Error($"Error. Stopping."); + Console.WriteLine("Error. Stopping."); + Program.Logger.Error("Error. Stopping."); } } else @@ -102,7 +100,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire catch (Exception ex) { RateLimiter.AddDelay(TimeSpan.FromSeconds(5)); - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -138,7 +136,7 @@ private static async Task ScanIndexAsync(HttpClient httpClient, We { Polly.Retry.AsyncRetryPolicy asyncRetryPolicy = Library.GetAsyncRetryPolicy((ex, waitTimeSpan, retry, pollyContext) => { - Logger.Warn($"Error retrieving directory listing for {webDirectory.Uri}, waiting {waitTimeSpan.TotalSeconds} seconds.. Error: {ex.Message}"); + Program.Logger.Warning("Error retrieving directory listing for {url}, waiting {waitTime:F0} seconds.. 
Error: {error}", webDirectory.Uri, waitTimeSpan.TotalSeconds, ex.Message); RateLimiter.AddDelay(waitTimeSpan); }, 8); @@ -151,7 +149,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => webDirectory.Url += "/"; } - Logger.Warn($"Retrieving listings for {webDirectory.Uri}"); + Program.Logger.Warning("Retrieving listings for {url}", webDirectory.Uri); HttpResponseMessage httpResponseMessage = await httpClient.PostAsync($"{OpenDirectoryIndexer.Session.Root.Url}{Uri.EscapeDataString(webDirectory.Url.Replace(OpenDirectoryIndexer.Session.Root.Url, string.Empty).TrimEnd('/'))}/?rootId={OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_GdIndex_RootId]}", null); @@ -190,7 +188,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => } catch (Exception ex) { - Logger.Error(ex, $"Error retrieving directory listing for {webDirectory.Url}"); + Program.Logger.Error(ex, "Error retrieving directory listing for {url}", webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/GDIndex/Go2IndexParser.cs b/src/OpenDirectoryDownloader/Site/GDIndex/Go2IndexParser.cs index 7eb7ca58..141eee8d 100644 --- a/src/OpenDirectoryDownloader/Site/GDIndex/Go2IndexParser.cs +++ b/src/OpenDirectoryDownloader/Site/GDIndex/Go2IndexParser.cs @@ -1,5 +1,4 @@ using Newtonsoft.Json; -using NLog; using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; using System; @@ -12,7 +11,6 @@ namespace OpenDirectoryDownloader.Site.GDIndex.Go2Index; public static class Go2IndexParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string FolderMimeType = "application/vnd.google-apps.folder"; private const string Parser = "Go2Index"; private static readonly RateLimiter RateLimiter = new(1, TimeSpan.FromSeconds(1)); @@ -26,10 +24,10 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (!OpenDirectoryIndexer.Session.Parameters.ContainsKey(Constants.Parameters_Password)) { Console.WriteLine($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); - Logger.Info($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); + Program.Logger.Information("{parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors.", Parser); Console.WriteLine("Check if password is needed..."); - Logger.Info("Check if password is needed..."); + Program.Logger.Information("Check if password is needed..."); OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password] = null; HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary @@ -50,12 +48,12 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse.Error?.Code == (int)HttpStatusCode.Unauthorized) { Console.WriteLine("Directory is password protected, please enter password:"); - Logger.Info("Directory is password protected, please enter password."); + Program.Logger.Information("Directory is password protected, please enter password."); OpenDirectoryIndexer.Session.Parameters["GoIndex_Password"] = Console.ReadLine(); Console.WriteLine($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); - Logger.Info($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); + 
Program.Logger.Information("Using password: {password}", OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary { @@ -76,14 +74,14 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse is null) { Console.WriteLine("Error. Invalid response. Stopping."); - Logger.Error("Error. Invalid response. Stopping."); + Program.Logger.Error("Error. Invalid response. Stopping."); } else { if (indexResponse.Error == null) { Console.WriteLine("Password OK!"); - Logger.Info("Password OK!"); + Program.Logger.Information("Password OK!"); webDirectory = await ScanIndexAsync(httpClient, webDirectory); } @@ -91,7 +89,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire { OpenDirectoryIndexer.Session.Parameters.Remove(Constants.Parameters_Password); Console.WriteLine($"Error. Code: {indexResponse.Error.Code}, Message: {indexResponse.Error.Message}. Stopping."); - Logger.Error($"Error. Code: {indexResponse.Error.Code}, Message: {indexResponse.Error.Message}. Stopping."); + Program.Logger.Error("Error. Code: {errorCode}, Message: {errorMessage}. Stopping.", indexResponse.Error.Code, indexResponse.Error.Message); } } } @@ -103,7 +101,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire catch (Exception ex) { RateLimiter.AddDelay(TimeSpan.FromSeconds(5)); - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for '{url}'", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -127,7 +125,7 @@ private static async Task ScanIndexAsync(HttpClient httpClient, We { Polly.Retry.AsyncRetryPolicy asyncRetryPolicy = Library.GetAsyncRetryPolicy((ex, waitTimeSpan, retry, pollyContext) => { - Logger.Warn($"Error retrieving directory listing for {webDirectory.Uri}, waiting {waitTimeSpan.TotalSeconds} seconds.. Error: {ex.Message}"); + Program.Logger.Warning("Error retrieving directory listing for {url}, waiting {waitTime:F0} seconds.. Error: {error}", webDirectory.Uri, waitTimeSpan.TotalSeconds, ex.Message); RateLimiter.AddDelay(waitTimeSpan); }, 8); @@ -145,7 +143,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => { await RateLimiter.RateLimit(); - Logger.Warn($"Retrieving listings for {webDirectory.Uri.PathAndQuery}, page {pageIndex + 1}{(!string.IsNullOrWhiteSpace(OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]) ? 
$" with password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}" : string.Empty)}"); + Program.Logger.Warning("Retrieving listings for {relativeUrl}, page {page} with password: {password}", webDirectory.Uri.PathAndQuery, pageIndex + 1, OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary { @@ -211,7 +209,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => } catch (Exception ex) { - Logger.Error(ex, $"Error retrieving directory listing for {webDirectory.Url}"); + Program.Logger.Error(ex, "Error retrieving directory listing for {url}", webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/GDIndex/GoIndexParser.cs b/src/OpenDirectoryDownloader/Site/GDIndex/GoIndexParser.cs index 9701fa2a..9d3a1547 100644 --- a/src/OpenDirectoryDownloader/Site/GDIndex/GoIndexParser.cs +++ b/src/OpenDirectoryDownloader/Site/GDIndex/GoIndexParser.cs @@ -1,8 +1,6 @@ using Newtonsoft.Json; -using NLog; using OpenDirectoryDownloader.Shared; using OpenDirectoryDownloader.Shared.Models; -using OpenDirectoryDownloader.Site.GDIndex.GoIndex; using System; using System.Collections.Generic; using System.Net; @@ -13,7 +11,6 @@ namespace OpenDirectoryDownloader.Site.GDIndex.GoIndex; public static class GoIndexParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string FolderMimeType = "application/vnd.google-apps.folder"; private const string Parser = "GoIndex"; private static readonly RateLimiter RateLimiter = new(1, TimeSpan.FromSeconds(1)); @@ -27,10 +24,10 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (!OpenDirectoryIndexer.Session.Parameters.ContainsKey(Constants.Parameters_Password)) { Console.WriteLine($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); - Logger.Info($"{Parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors."); + Program.Logger.Information("{parser} will always be indexed at a maximum rate of 1 per second, else you will run into problems and errors.", Parser); Console.WriteLine("Check if password is needed..."); - Logger.Info("Check if password is needed..."); + Program.Logger.Information("Check if password is needed..."); OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password] = ""; HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary @@ -48,12 +45,12 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse.Error?.Code == (int)HttpStatusCode.Unauthorized) { Console.WriteLine("Directory is password protected, please enter password:"); - Logger.Info("Directory is password protected, please enter password."); + Program.Logger.Information("Directory is password protected, please enter password."); OpenDirectoryIndexer.Session.Parameters["GoIndex_Password"] = Console.ReadLine(); Console.WriteLine($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); - Logger.Info($"Using password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}"); + Program.Logger.Information("Using password: {password}", 
OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary { @@ -70,20 +67,20 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (indexResponse is null) { - Logger.Error("Error. Invalid response. Stopping."); + Program.Logger.Error("Error. Invalid response. Stopping."); } else { if (indexResponse.Error == null) { - Logger.Warn("Password OK!"); + Program.Logger.Warning("Password OK!"); webDirectory = await ScanIndexAsync(httpClient, webDirectory); } else { OpenDirectoryIndexer.Session.Parameters.Remove(Constants.Parameters_Password); - Logger.Error($"Error. Code: {indexResponse.Error.Code}, Message: {indexResponse.Error.Message}. Stopping."); + Program.Logger.Error("Error. Code: {errorCode}, Message: {errorMessage}. Stopping.", indexResponse.Error.Code, indexResponse.Error.Message); } } } @@ -94,7 +91,7 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for {url}", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -118,7 +115,7 @@ private static async Task ScanIndexAsync(HttpClient httpClient, We { Polly.Retry.AsyncRetryPolicy asyncRetryPolicy = Library.GetAsyncRetryPolicy((ex, waitTimeSpan, retry, pollyContext) => { - Logger.Warn($"Error retrieving directory listing for {webDirectory.Uri}, waiting {waitTimeSpan.TotalSeconds} seconds.. Error: {ex.Message}"); + Program.Logger.Warning("Error retrieving directory listing for {url}, waiting {waitTime:F0} seconds.. Error: {error}", webDirectory.Uri, waitTimeSpan.TotalSeconds, ex.Message); RateLimiter.AddDelay(waitTimeSpan); }, 8); @@ -131,7 +128,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => webDirectory.Url += "/"; } - Logger.Warn($"Retrieving listings for {webDirectory.Uri.PathAndQuery}{(!string.IsNullOrWhiteSpace(OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]) ? 
$" with password: {OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]}" : string.Empty)}"); + Program.Logger.Warning("Retrieving listings for {relativeUrl} with password: {password}", webDirectory.Uri.PathAndQuery, OpenDirectoryIndexer.Session.Parameters[Constants.Parameters_Password]); HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(webDirectory.Uri, new StringContent(JsonConvert.SerializeObject(new Dictionary { @@ -178,7 +175,7 @@ await asyncRetryPolicy.ExecuteAndCaptureAsync(async () => } catch (Exception ex) { - Logger.Error(ex, $"Error retrieving directory listing for {webDirectory.Url}"); + Program.Logger.Error(ex, "Error retrieving directory listing for {url}", webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/GDIndex/GoogleDriveIndexMapping.cs b/src/OpenDirectoryDownloader/Site/GDIndex/GoogleDriveIndexMapping.cs index e34d70cc..fe952878 100644 --- a/src/OpenDirectoryDownloader/Site/GDIndex/GoogleDriveIndexMapping.cs +++ b/src/OpenDirectoryDownloader/Site/GDIndex/GoogleDriveIndexMapping.cs @@ -49,7 +49,7 @@ public static string GetGoogleDriveIndexType(string scriptUrl) { foreach (KeyValuePair siteMapping in SiteMapping) { - if (scriptUrl.ToLower().Contains(siteMapping.Key.ToLower())) + if (scriptUrl.ToLowerInvariant().Contains(siteMapping.Key.ToLowerInvariant())) { return siteMapping.Value; } diff --git a/src/OpenDirectoryDownloader/Site/GitHub/GitHubParser.cs b/src/OpenDirectoryDownloader/Site/GitHub/GitHubParser.cs index 047124d5..69b43359 100644 --- a/src/OpenDirectoryDownloader/Site/GitHub/GitHubParser.cs +++ b/src/OpenDirectoryDownloader/Site/GitHub/GitHubParser.cs @@ -1,5 +1,4 @@ using Newtonsoft.Json.Linq; -using NLog; using OpenDirectoryDownloader.Models; using OpenDirectoryDownloader.Shared.Models; using System; @@ -14,7 +13,6 @@ namespace OpenDirectoryDownloader.Site.GitHub; public static class GitHubParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private const string Parser = "GitHub"; private static string Owner { get; set; } private static string Repository { get; set; } @@ -43,11 +41,11 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire if (!string.IsNullOrWhiteSpace(token)) { - Logger.Warn($"Using provided GitHub token for higher rate limits"); + Program.Logger.Warning("Using provided GitHub token for higher rate limits"); httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", token); } - Logger.Warn("Retrieving default branch"); + Program.Logger.Warning("Retrieving default branch"); HttpResponseMessage httpResponseMessage = await DoRequest(httpClient, GetApiUrl(Owner, Repository)); string json = await httpResponseMessage.Content.ReadAsStringAsync(); @@ -58,9 +56,9 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire throw new Exception("Invalid default branch"); } - Logger.Warn($"Default branch: {DefaultBranch}"); + Program.Logger.Warning("Default branch: {defaultBranch}", DefaultBranch); - Logger.Warn("Retrieving last commit SHA"); + Program.Logger.Warning("Retrieving last commit SHA"); httpResponseMessage = await DoRequest(httpClient, $"{GetApiUrl(Owner, Repository)}/branches/{DefaultBranch}"); @@ -72,14 +70,14 @@ public static async Task ParseIndex(HttpClient httpClient, WebDire throw new Exception("Empty repository"); } - Logger.Warn($"Last commit SHA: {CurrentCommitSha}"); + Program.Logger.Warning("Last commit SHA: {commitSha}", 
CurrentCommitSha); } webDirectory = await ScanAsync(httpClient, webDirectory); } catch (Exception ex) { - Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error parsing {parser} for {url}", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; @@ -111,7 +109,7 @@ private static async Task DoRequest(HttpClient httpClient, if (httpResponseMessage.Headers.Contains("X-RateLimit-Remaining")) { - Logger.Warn($"RateLimit remaining: {GetHeader(httpResponseMessage.Headers, "X-RateLimit-Remaining")}/{GetHeader(httpResponseMessage.Headers, "X-RateLimit-Limit")}"); + Program.Logger.Warning("RateLimit remaining: {rateLimitRemaining}/{rateLimitTotal}", GetHeader(httpResponseMessage.Headers, "X-RateLimit-Remaining"), GetHeader(httpResponseMessage.Headers, "X-RateLimit-Limit")); } if (httpResponseMessage.StatusCode == HttpStatusCode.Unauthorized) @@ -137,7 +135,7 @@ private static async Task DoRequest(HttpClient httpClient, resetDateTime = currentDate + rateLimitTimeSpan; - Logger.Warn($"Rate limited, waiting until {resetDateTime.ToLocalTime().ToString(Constants.DateTimeFormat)}.. Increase rate limits by using a token: https://github.com/settings/tokens/new (no scopes required)"); + Program.Logger.Warning("Rate limited, waiting until {untilDate}.. Increase rate limits by using a token: https://github.com/settings/tokens/new (no scopes required)", resetDateTime.ToLocalTime().ToString(Constants.DateTimeFormat)); OpenDirectoryIndexer.ShowStatistics = false; await Task.Delay(rateLimitTimeSpan); @@ -155,7 +153,7 @@ private static async Task DoRequest(HttpClient httpClient, private static async Task ScanAsync(HttpClient httpClient, WebDirectory webDirectory) { - Logger.Debug($"Retrieving listings for {webDirectory.Uri}"); + Program.Logger.Debug("Retrieving listings for {url}", webDirectory.Uri); webDirectory.Parser = Parser; @@ -183,7 +181,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire if (gitHubResult.Truncated) { - Logger.Warn($"GitHub response is truncated with {gitHubResult.Tree.Length} items, sadly there is no paging available.."); + Program.Logger.Warning("GitHub response is truncated with {items} items, sadly there is no paging available..", gitHubResult.Tree.Length); } WebDirectory currentWebDirectory = webDirectory; @@ -232,7 +230,7 @@ private static async Task ScanAsync(HttpClient httpClient, WebDire } catch (Exception ex) { - Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}"); + Program.Logger.Error(ex, "Error processing {parser} for {url}", Parser, webDirectory.Url); webDirectory.Error = true; OpenDirectoryIndexer.Session.Errors++; diff --git a/src/OpenDirectoryDownloader/Site/GoFileIO/GoFileIOParser.cs b/src/OpenDirectoryDownloader/Site/GoFileIO/GoFileIOParser.cs index 1eb19e74..a8704d85 100644 --- a/src/OpenDirectoryDownloader/Site/GoFileIO/GoFileIOParser.cs +++ b/src/OpenDirectoryDownloader/Site/GoFileIO/GoFileIOParser.cs @@ -1,5 +1,4 @@ -using NLog; -using OpenDirectoryDownloader.Shared.Models; +using OpenDirectoryDownloader.Shared.Models; using System; using System.Net.Http; using System.Text.RegularExpressions; @@ -9,7 +8,6 @@ namespace OpenDirectoryDownloader.Site.GoFileIO; public static class GoFileIOParser { - private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); private static readonly Regex FolderHashRegex = new(@".*?\/d\/(?.*)"); private const string Parser = "GoFileIO"; private const string StatusOK = "ok"; @@ -24,7 +22,7 @@ 
diff --git a/src/OpenDirectoryDownloader/Site/Mediafire/MediafireParser.cs b/src/OpenDirectoryDownloader/Site/Mediafire/MediafireParser.cs
index 68f8384b..a14a4b6e 100644
--- a/src/OpenDirectoryDownloader/Site/Mediafire/MediafireParser.cs
+++ b/src/OpenDirectoryDownloader/Site/Mediafire/MediafireParser.cs
@@ -1,5 +1,4 @@
-using NLog;
-using OpenDirectoryDownloader.Shared.Models;
+using OpenDirectoryDownloader.Shared.Models;
 using System;
 using System.Net.Http;
 using System.Text.RegularExpressions;
@@ -9,7 +8,6 @@ namespace OpenDirectoryDownloader.Site.Mediafire;

 public static class MediafireParser
 {
-	private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
 	private static readonly Regex FolderIdRegex = new(@"\/folder\/(?<FolderId>[^/]*)(?:\/?.*)?");
 	private static readonly Regex FolderIdRegex2 = new(@"\/\?(?<FolderId>[^/]*)(?:\/?.*)?");
 	private const string Parser = "Mediafire";
@@ -24,7 +22,7 @@ public static async Task<WebDirectory> ParseIndex(HttpClient httpClient, WebDire
 	}
 	catch (Exception ex)
 	{
-		Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}");
+		Program.Logger.Error(ex, "Error parsing {parser} for {url}", Parser, webDirectory.Url);
 		webDirectory.Error = true;

 		OpenDirectoryIndexer.Session.Errors++;
@@ -61,7 +59,7 @@ private static string GetFolderId(WebDirectory webDirectory)

 private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDirectory webDirectory)
 {
-	Logger.Debug($"Retrieving listings for {webDirectory.Uri}");
+	Program.Logger.Debug("Retrieving listings for {url}", webDirectory.Url);

 	webDirectory.Parser = Parser;

@@ -76,7 +74,7 @@ private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDire

 		do
 		{
-			Logger.Warn($"Retrieving {listingType} listing for {webDirectory.Uri}, page {chunkNumber}");
+			Program.Logger.Warning("Retrieving {listingType} listing for {url}, page {page}", listingType, webDirectory.Url, chunkNumber);

 			HttpResponseMessage httpResponseMessage = await httpClient.GetAsync(GetApiListingUrl(folderId, listingType, chunkNumber));

@@ -102,7 +100,7 @@ private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDire
 	}
 	catch (Exception ex)
 	{
-		Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}");
+		Program.Logger.Error(ex, "Error processing {parser} for {url}", Parser, webDirectory.Url);
 		webDirectory.Error = true;

 		OpenDirectoryIndexer.Session.Errors++;
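MediafireParser pages through folder contents chunk by chunk in the do/while loop above, asking for one listingType at a time and incrementing chunkNumber until the API stops returning more. A sketch of that loop, with a hypothetical URL shape and a hypothetical more_chunks flag standing in for the real GetApiListingUrl and response models:

using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;

public static class ChunkedListingSketch
{
	public static async Task<List<JToken>> GetAllChunksAsync(HttpClient httpClient, string folderId, string listingType)
	{
		List<JToken> items = new();
		int chunkNumber = 1;
		bool moreChunks;

		do
		{
			// Hypothetical URL; the parser builds the real one via GetApiListingUrl
			string url = $"https://www.mediafire.com/api/1.4/folder/get_content.php?folder_key={folderId}&content_type={listingType}&chunk={chunkNumber}&response_format=json";
			string json = await httpClient.GetStringAsync(url);
			JObject response = JObject.Parse(json);

			JToken content = response["response"]?["folder_content"];

			if (content?[listingType] is JArray chunkItems)
			{
				items.AddRange(chunkItems);
			}

			// Assumed flag: keep paging while the API reports another chunk
			moreChunks = (string)content?["more_chunks"] == "yes";
			chunkNumber++;
		} while (moreChunks);

		return items;
	}
}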
diff --git a/src/OpenDirectoryDownloader/Site/Pixeldrain/PixeldrainParser.cs b/src/OpenDirectoryDownloader/Site/Pixeldrain/PixeldrainParser.cs
index 17665dbe..262dda5a 100644
--- a/src/OpenDirectoryDownloader/Site/Pixeldrain/PixeldrainParser.cs
+++ b/src/OpenDirectoryDownloader/Site/Pixeldrain/PixeldrainParser.cs
@@ -1,5 +1,4 @@
-using NLog;
-using OpenDirectoryDownloader.Shared.Models;
+using OpenDirectoryDownloader.Shared.Models;
 using OpenDirectoryDownloader.Site.Pixeldrain.FileResult;
 using OpenDirectoryDownloader.Site.Pixeldrain.ListResult;
 using System;
@@ -11,7 +10,6 @@ namespace OpenDirectoryDownloader.Site.Pixeldrain;

 public static class PixeldrainParser
 {
-	private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
 	private static readonly Regex ListingTypeRegex = new(@".*\/(?<ListingType>.*)\/.*");
 	private static readonly Regex ListingRegex = new(@"window\.viewer_data = (?<Listing>.*);");
 	private const string Parser = "Pixeldrain";
@@ -26,7 +24,7 @@ public static async Task<WebDirectory> ParseIndex(HttpClient httpClient, WebDire
 	}
 	catch (Exception ex)
 	{
-		Logger.Error(ex, $"Error parsing {Parser} for URL: {webDirectory.Url}");
+		Program.Logger.Error(ex, "Error parsing {parser} for {url}", Parser, webDirectory.Url);
 		webDirectory.Error = true;

 		OpenDirectoryIndexer.Session.Errors++;
@@ -44,7 +42,7 @@ public static async Task<WebDirectory> ParseIndex(HttpClient httpClient, WebDire

 private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDirectory webDirectory, string html)
 {
-	Logger.Debug($"Retrieving listings for {webDirectory.Uri}");
+	Program.Logger.Debug("Retrieving listings for {url}", webDirectory.Url);

 	webDirectory.Parser = Parser;

@@ -59,7 +57,7 @@ private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDire

 		string listingType = listingTypeRegexMatch.Groups["ListingType"].Value;

-		Logger.Warn($"Retrieving listings for {webDirectory.Uri}");
+		Program.Logger.Warning("Retrieving listings for {url}", webDirectory.Url);

 		Match listingRegexMatch = ListingRegex.Match(html);

@@ -104,7 +102,7 @@ private static async Task<WebDirectory> ScanAsync(HttpClient httpClient, WebDire
 	}
 	catch (Exception ex)
 	{
-		Logger.Error(ex, $"Error processing {Parser} for URL: {webDirectory.Url}");
+		Program.Logger.Error(ex, "Error processing {parser} for {url}", Parser, webDirectory.Url);
 		webDirectory.Error = true;

 		OpenDirectoryIndexer.Session.Errors++;
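PixeldrainParser does not call a listing API here; it scrapes the JSON blob the page embeds as window.viewer_data, which ListingRegex captures from the HTML. A sketch of that extraction, assuming the named capture group is called Listing (the real group name in ListingRegex may differ) and simplifying the deserialization target to a JObject:

using System.Text.RegularExpressions;
using Newtonsoft.Json.Linq;

public static class ViewerDataSketch
{
	// Mirrors the ListingRegex above; the group name "Listing" is an assumption
	private static readonly Regex ListingRegex = new(@"window\.viewer_data = (?<Listing>.*);");

	public static JObject ExtractViewerData(string html)
	{
		Match match = ListingRegex.Match(html);

		if (!match.Success)
		{
			return null;
		}

		// The captured group is a JavaScript object literal that also parses as JSON
		return JObject.Parse(match.Groups["Listing"].Value);
	}
}

In the parser itself the captured value is deserialized into the FileResult/ListResult models referenced by the using directives above, depending on the listingType taken from the URL.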