diff --git a/README.md b/README.md index e2063fb60799d2d79dc7127678e2a64cbe2fec8b..c88e1368e00d0904f4a088d07d22534ac7d67075 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ Developer note: The software implements the [Torznab](https://github.com/Sonarr/ #### Supported Private Trackers + * Abnormal * AlphaRatio * AnimeBytes * Avistaz diff --git a/src/Jackett/Content/logos/abnormal.png b/src/Jackett/Content/logos/abnormal.png new file mode 100644 index 0000000000000000000000000000000000000000..c8e47e02af7132d85cb177a6676d75401d4faaa7 Binary files /dev/null and b/src/Jackett/Content/logos/abnormal.png differ diff --git a/src/Jackett/Indexers/Abnormal.cs b/src/Jackett/Indexers/Abnormal.cs new file mode 100644 index 0000000000000000000000000000000000000000..69616341afb1237648536d23a820efb6c7652257 --- /dev/null +++ b/src/Jackett/Indexers/Abnormal.cs @@ -0,0 +1,870 @@ +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.Linq; +using System.Reflection; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using System.Web; +using CsQuery; +using Jackett.Models; +using Jackett.Models.IndexerConfig.Bespoke; +using Jackett.Services; +using Jackett.Utils; +using Jackett.Utils.Clients; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using NLog; + +namespace Jackett.Indexers +{ + /// <summary> + /// Provider for Abnormal Private French Tracker + /// </summary> + public class Abnormal : BaseIndexer, IIndexer + { + private string LoginUrl { get { return SiteLink + "login.php"; } } + private string SearchUrl { get { return SiteLink + "torrents.php"; } } + private string TorrentCommentUrl { get { return TorrentDescriptionUrl; } } + private string TorrentDescriptionUrl { get { return SiteLink + "torrents.php?id="; } } + private string TorrentDownloadUrl { get { return SiteLink + "torrents.php?action=download&id={id}&authkey={auth_key}&torrent_pass={torrent_pass}"; } } + private bool Latency { get { return ConfigData.Latency.Value; } } + private bool DevMode { get { return ConfigData.DevMode.Value; } } + private bool CacheMode { get { return ConfigData.HardDriveCache.Value; } } + private string directory { get { return System.IO.Path.GetTempPath() + "Jackett\\" + MethodBase.GetCurrentMethod().DeclaringType.Name + "\\"; } } + + private Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>(); + private CQ fDom = null; + + private ConfigurationDataAbnormal ConfigData + { + get { return (ConfigurationDataAbnormal)configData; } + set { base.configData = value; } + } + + public Abnormal(IIndexerManagerService i, IWebClient w, Logger l, IProtectionService ps) + : base( + name: "Abnormal", + description: "General French Private Tracker", + link: "https://abnormal.ws/", + caps: new TorznabCapabilities(), + manager: i, + client: w, + logger: l, + p: ps, + downloadBase: "https://abnormal.ws/torrents.php?action=download&id=", + configData: new ConfigurationDataAbnormal()) + { + // Clean capabilities + TorznabCaps.Categories.Clear(); + + // Movies + AddCategoryMapping("MOVIE|DVDR", TorznabCatType.MoviesDVD); // DVDR + AddCategoryMapping("MOVIE|DVDRIP", TorznabCatType.MoviesSD); // DVDRIP + AddCategoryMapping("MOVIE|BDRIP", TorznabCatType.MoviesSD); // BDRIP + AddCategoryMapping("MOVIE|VOSTFR", TorznabCatType.MoviesOther); // VOSTFR + AddCategoryMapping("MOVIE|HD|720p", TorznabCatType.MoviesHD); // HD 720P + AddCategoryMapping("MOVIE|HD|1080p", TorznabCatType.MoviesHD); // HD 1080P + 
AddCategoryMapping("MOVIE|REMUXBR", TorznabCatType.MoviesBluRay); // REMUX BLURAY + AddCategoryMapping("MOVIE|FULLBR", TorznabCatType.MoviesBluRay); // FULL BLURAY + + // Series + AddCategoryMapping("TV|SD|VOSTFR", TorznabCatType.TV); // SD VOSTFR + AddCategoryMapping("TV|HD|VOSTFR", TorznabCatType.TVHD); // HD VOSTFR + AddCategoryMapping("TV|SD|VF", TorznabCatType.TVSD); // SD VF + AddCategoryMapping("TV|HD|VF", TorznabCatType.TVHD); // HD VF + AddCategoryMapping("TV|PACK|FR", TorznabCatType.TVOTHER); // PACK FR + AddCategoryMapping("TV|PACK|VOSTFR", TorznabCatType.TVOTHER); // PACK VOSTFR + AddCategoryMapping("TV|EMISSIONS", TorznabCatType.TVOTHER); // EMISSIONS + + // Anime + AddCategoryMapping("ANIME", TorznabCatType.TVAnime); // ANIME + + // Documentaries + AddCategoryMapping("DOCS", TorznabCatType.TVDocumentary); // DOCS + + // Music + AddCategoryMapping("MUSIC|FLAC", TorznabCatType.AudioLossless); // FLAC + AddCategoryMapping("MUSIC|MP3", TorznabCatType.AudioMP3); // MP3 + AddCategoryMapping("MUSIC|CONCERT", TorznabCatType.AudioVideo); // CONCERT + + // Other + AddCategoryMapping("PC|APP", TorznabCatType.PC); // PC + AddCategoryMapping("PC|GAMES", TorznabCatType.PCGames); // GAMES + AddCategoryMapping("EBOOKS", TorznabCatType.BooksEbook); // EBOOKS + } + + /// <summary> + /// Configure our WiHD Provider + /// </summary> + /// <param name="configJson">Our params in Json</param> + /// <returns>Configuration state</returns> + public async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson) + { + // Retrieve config values set by Jackett's user + ConfigData.LoadValuesFromJson(configJson); + + // Check & Validate Config + validateConfig(); + + // Setting our data for a better emulated browser (maximum security) + // TODO: Encoded Content not supported by Jackett at this time + // emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate"); + + // If we want to simulate a browser + if (ConfigData.Browser.Value) { + + // Clean headers + emulatedBrowserHeaders.Clear(); + + // Inject headers + emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value); + emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value); + emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDNT.Value).ToString()); + emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString()); + emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value); + } + + + // Getting login form to retrieve CSRF token + var myRequest = new Utils.Clients.WebRequest() + { + Url = LoginUrl + }; + + // Add our headers to request + myRequest.Headers = emulatedBrowserHeaders; + + // Building login form data + var pairs = new Dictionary<string, string> { + { "username", ConfigData.Username.Value }, + { "password", ConfigData.Password.Value }, + { "keeplogged", "1" }, + { "login", "Connexion" } + }; + + // Do the login + var request = new Utils.Clients.WebRequest(){ + PostData = pairs, + Referer = LoginUrl, + Type = RequestType.POST, + Url = LoginUrl, + Headers = emulatedBrowserHeaders + }; + + // Perform loggin + latencyNow(); + output("\nPerform loggin.. 
with " + LoginUrl); + var response = await webclient.GetString(request); + + // Test if we are logged in + await ConfigureIfOK(response.Cookies, response.Cookies.Contains("session="), () => + { + // Parse error page + CQ dom = response.Content; + string message = dom[".warning"].Text().Split('.').Reverse().Skip(1).First(); + + // Try left + string left = dom[".info"].Text().Trim(); + + // Oops, unable to login + output("-> Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error"); + throw new ExceptionWithConfigData("Login failed: " + message, configData); + }); + + output("-> Login Success"); + + return IndexerConfigurationStatus.RequiresTesting; + } + + /// <summary> + /// Execute our search query + /// </summary> + /// <param name="query">Query</param> + /// <returns>Releases</returns> + public async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query) + { + var releases = new List<ReleaseInfo>(); + var torrentRowList = new List<CQ>(); + var searchTerm = query.GetQueryString(); + var searchUrl = SearchUrl; + int nbResults = 0; + int pageLinkCount = 0; + + // Check cache first so we don't query the server (if search term used or not in dev mode) + if(!DevMode && !string.IsNullOrEmpty(searchTerm)) + { + lock (cache) + { + // Remove old cache items + CleanCache(); + + // Search in cache + var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault(); + if (cachedResult != null) + return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray(); + } + } + + // Build our query + var request = buildQuery(searchTerm, query, searchUrl); + + // Getting results & Store content + WebClientStringResult results = await queryExec(request); + fDom = results.Content; + + try + { + // Find torrent rows + var firstPageRows = findTorrentRows(); + + // Add them to torrents list + torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq())); + + // Check if there are pagination links at bottom + Boolean pagination = (fDom[".linkbox > a"].Length != 0); + + // If pagination available + if (pagination) { + // Calculate numbers of pages available for this search query (Based on number results and number of torrents on first page) + pageLinkCount = ParseUtil.CoerceInt(Regex.Match(fDom[".linkbox > a"].Last().Attr("href").ToString(), @"\d+").Value); + + // Calculate average number of results (based on torrents rows lenght on first page) + nbResults = firstPageRows.Count() * pageLinkCount; + } + else { + // Check if we have a minimum of one result + if (firstPageRows.Length >= 1) + { + // Retrieve total count on our alone page + nbResults = firstPageRows.Count(); + pageLinkCount = 1; + } + else + { + output("\nNo result found for your query, please try another search term ...\n", "info"); + // No result found for this query + return releases; + } + } + output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !"); + output("\nThere are " + firstPageRows.Length + " results on the first page !"); + + // If we have a term used for search and pagination result superior to one + if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1) + { + // Starting with page #2 + for (int i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++) + { + output("\nProcessing page #" + i); + + // Request our page + latencyNow(); + + // Build our query + var pageRequest = buildQuery(searchTerm, query, searchUrl, i); + + // Getting results & 
Store content + WebClientStringResult pageResults = await queryExec(pageRequest); + + // Assign response + fDom = pageResults.Content; + + // Process page results + var additionalPageRows = findTorrentRows(); + + // Add them to torrents list + torrentRowList.AddRange(additionalPageRows.Select(fRow => fRow.Cq())); + } + } + else + { + // No search term, maybe testing... so registring autkey and torrentpass for future uses + string infosData = firstPageRows.First().Find("td:eq(3) > a").Attr("href"); + IList<string> infosList = infosData.Split('&').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); + IList<string> infosTracker = infosList.Select(s => s.Split(new[] { '=' }, 2)[1].Trim()).ToList(); + + output("\nStoring Authkey for future uses..."); + ConfigData.AuthKey.Value = infosTracker[2]; + + output("\nStoring TorrentPass for future uses..."); + ConfigData.TorrentPass.Value = infosTracker[3]; + + } + + // Loop on results + foreach (CQ tRow in torrentRowList) + { + output("\n=>> Torrent #" + (releases.Count + 1)); + + // ID + int id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value); + output("ID: " + id); + + // Release Name + string name = tRow.Find("td:eq(1) > a").Text().ToString(); + output("Release: " + name); + + // Category + string categoryID = tRow.Find("td:eq(0) > a").Attr("href").Replace("torrents.php?cat[]=", String.Empty); + output("Category: " + MapTrackerCatToNewznab(categoryID) + " (" + categoryID + ")"); + + // Seeders + int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6)").Text(), @"\d+").Value); + output("Seeders: " + seeders); + + // Leechers + int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(7)").Text(), @"\d+").Value); + output("Leechers: " + leechers); + + // Completed + int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value); + output("Completed: " + completed); + + // Size + string sizeStr = tRow.Find("td:eq(4)").Text().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb"); + long size = ReleaseInfo.GetBytes(sizeStr); + output("Size: " + sizeStr + " (" + size + " bytes)"); + + // Publish DateToString + IList<string> clockList = tRow.Find("td:eq(2) > span").Text().Replace("Il y a", "").Split(',').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); + var date = agoToDate(clockList); + output("Released on: " + date.ToLocalTime()); + + // Torrent Details URL + Uri detailsLink = new Uri(TorrentDescriptionUrl + id); + output("Details: " + detailsLink.AbsoluteUri); + + // Torrent Comments URL + Uri commentsLink = new Uri(TorrentCommentUrl + id); + output("Comments Link: " + commentsLink.AbsoluteUri); + + // Torrent Download URL + Uri downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{auth_key}", ConfigData.AuthKey.Value).Replace("{torrent_pass}", ConfigData.TorrentPass.Value)); + output("Download Link: " + downloadLink.AbsoluteUri); + + // Building release infos + var release = new ReleaseInfo(); + release.Category = MapTrackerCatToNewznab(categoryID.ToString()); + release.Title = name; + release.Seeders = seeders; + release.Peers = seeders + leechers; + release.MinimumRatio = 1; + release.MinimumSeedTime = 172800; + release.PublishDate = date; + release.Size = size; + release.Guid = detailsLink; + release.Comments = commentsLink; + release.Link = downloadLink; + releases.Add(release); + } + + } + catch (Exception ex) + { + OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex); + } + + 
// Return found releases + return releases; + } + + /// <summary> + /// Build query to process + /// </summary> + /// <param name="term">Term to search</param> + /// <param name="query">Torznab Query for categories mapping</param> + /// <param name="url">Search url for provider</param> + /// <param name="page">Page number to request</param> + /// <returns>URL to query for parsing and processing results</returns> + private string buildQuery(string term, TorznabQuery query, string url, int page = 0) + { + var parameters = new NameValueCollection(); + List<string> categoriesList = MapTorznabCapsToTrackers(query); + string categories = null; + + // Check if we are processing a new page + if (page > 0) + { + // Adding page number to query + parameters.Add("page", page.ToString()); + } + + // Loop on Categories needed + foreach (string category in categoriesList) + { + // If last, build ! + if (categoriesList.Last() == category) + { + // Adding previous categories to URL with latest category + parameters.Add(Uri.EscapeDataString("cat[]"), HttpUtility.UrlEncode(category) + categories); + } + else + { + // Build categories parameter + categories += "&" + Uri.EscapeDataString("cat[]") + "=" + HttpUtility.UrlEncode(category); + } + } + + // If search term provided + if (!string.IsNullOrWhiteSpace(term)) + { + // Add search term + parameters.Add("search", HttpUtility.UrlEncode(term)); + } + else + { + parameters.Add("search", HttpUtility.UrlEncode("%")); + // Showing all torrents (just for output function) + term = "all"; + } + + // Building our query -- Cannot use GetQueryString due to UrlEncode (generating wrong cat[] param) + url += "?" + string.Join("&", parameters.AllKeys.Select(a => a + "=" + parameters[a])); + + output("\nBuilded query for \"" + term + "\"... " + url); + + // Return our search url + return url; + } + + /// <summary> + /// Switch Method for Querying + /// </summary> + /// <param name="request">URL created by Query Builder</param> + /// <returns>Results from query</returns> + private async Task<WebClientStringResult> queryExec(string request) + { + WebClientStringResult results = null; + + // Switch in we are in DEV mode with Hard Drive Cache or not + if (DevMode && CacheMode) + { + // Check Cache before querying and load previous results if available + results = await queryCache(request); + } + else + { + // Querying tracker directly + results = await queryTracker(request); + } + return results; + } + + /// <summary> + /// Get Torrents Page from Cache by Query Provided + /// </summary> + /// <param name="request">URL created by Query Builder</param> + /// <returns>Results from query</returns> + private async Task<WebClientStringResult> queryCache(string request) + { + WebClientStringResult results = null; + + // Create Directory if not exist + System.IO.Directory.CreateDirectory(directory); + + // Clean Storage Provider Directory from outdated cached queries + cleanCacheStorage(); + + // Create fingerprint for request + string file = directory + request.GetHashCode() + ".json"; + + // Checking modes states + if (System.IO.File.Exists(file)) + { + // File exist... loading it right now ! + output("Loading results from hard drive cache ..." + request.GetHashCode() + ".json"); + results = JsonConvert.DeserializeObject<WebClientStringResult>(System.IO.File.ReadAllText(file)); + } + else + { + // No cached file found, querying tracker directly + results = await queryTracker(request); + + // Cached file didn't exist for our query, writing it right now ! 
+ output("Writing results to hard drive cache ..." + request.GetHashCode() + ".json"); + System.IO.File.WriteAllText(file, JsonConvert.SerializeObject(results)); + } + return results; + } + + /// <summary> + /// Get Torrents Page from Tracker by Query Provided + /// </summary> + /// <param name="request">URL created by Query Builder</param> + /// <returns>Results from query</returns> + private async Task<WebClientStringResult> queryTracker(string request) + { + WebClientStringResult results = null; + + // Cache mode not enabled or cached file didn't exist for our query + output("\nQuerying tracker for results...."); + + // Request our first page + latencyNow(); + results = await RequestStringWithCookiesAndRetry(request, null, null, emulatedBrowserHeaders); + + // Return results from tracker + return results; + } + + /// <summary> + /// Clean Hard Drive Cache Storage + /// </summary> + /// <param name="force">Force Provider Folder deletion</param> + private void cleanCacheStorage(Boolean force = false) + { + // Check cleaning method + if(force) + { + // Deleting Provider Storage folder and all files recursively + output("\nDeleting Provider Storage folder and all files recursively ..."); + + // Check if directory exist + if(System.IO.Directory.Exists(directory)) + { + // Delete storage directory of provider + System.IO.Directory.Delete(directory, true); + output("-> Storage folder deleted successfully."); + } + else + { + // No directory, so nothing to do + output("-> No Storage folder found for this provider !"); + } + } + else + { + int i = 0; + // Check if there is file older than ... and delete them + output("\nCleaning Provider Storage folder... in progress."); + System.IO.Directory.GetFiles(directory) + .Select(f => new System.IO.FileInfo(f)) + .Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value))) + .ToList() + .ForEach(f => { + output("Deleting cached file << " + f.Name + " >> ... done."); + f.Delete(); + i++; + }); + + // Inform on what was cleaned during process + if(i > 0) { + output("-> Deleted " + i + " cached files during cleaning."); + } + else { + output("-> Nothing deleted during cleaning."); + } + } + } + + /// <summary> + /// Generate a random fake latency to avoid detection on tracker side + /// </summary> + private void latencyNow() + { + // Need latency ? + if(Latency) + { + // Generate a random value in our range + var random = new Random(DateTime.Now.Millisecond); + int waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value)); + output("\nLatency Faker => Sleeping for " + waiting + " ms..."); + + // Sleep now... 
+ System.Threading.Thread.Sleep(waiting); + } + } + + /// <summary> + /// Find torrent rows in search pages + /// </summary> + /// <returns>JQuery Object</returns> + private CQ findTorrentRows() + { + // Return all occurencis of torrents found + return fDom[".torrent_table > tbody > tr"].Not(".colhead"); + } + + /// <summary> + /// Convert Ago date to DateTime + /// </summary> + /// <param name="clockList"></param> + /// <returns>A DateTime</returns> + private DateTime agoToDate(IList<string> clockList) + { + DateTime release = DateTime.Now; + foreach (var ago in clockList) + { + // Check for years + if (ago.Contains("années") || ago.Contains("année")) + { + // Number of years to remove + int years = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddYears(-years); + + continue; + } + // Check for months + else if (ago.Contains("mois")) + { + // Number of months to remove + int months = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddMonths(-months); + + continue; + } + // Check for weeks + else if (ago.Contains("semaines") || ago.Contains("semaine")) + { + // Number of weeks to remove + int weeks = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddDays(-(7 * weeks)); + + continue; + } + // Check for days + else if (ago.Contains("jours") || ago.Contains("jour")) + { + // Number of days to remove + int days = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddDays(-days); + + continue; + } + // Check for hours + else if (ago.Contains("heures") || ago.Contains("heure")) + { + // Number of hours to remove + int hours = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddHours(-hours); + + continue; + } + // Check for minutes + else if (ago.Contains("mins") || ago.Contains("min")) + { + // Number of minutes to remove + int minutes = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddMinutes(-minutes); + + continue; + } + // Check for seconds + else if (ago.Contains("secondes") || ago.Contains("seconde")) + { + // Number of seconds to remove + int seconds = ParseUtil.CoerceInt(Regex.Match(ago.ToString(), @"\d+").Value); + // Removing + release = release.AddSeconds(-seconds); + + continue; + } + else + { + output("Unable to detect release date of torrent", "error"); + //throw new Exception("Unable to detect release date of torrent"); + } + } + return release; + } + + /// <summary> + /// Output message for logging or developpment (console) + /// </summary> + /// <param name="message">Message to output</param> + /// <param name="level">Level for Logger</param> + private void output(string message, string level = "debug") + { + // Check if we are in dev mode + if(DevMode) + { + // Output message to console + Console.WriteLine(message); + } + else + { + // Send message to logger with level + switch (level) + { + default: + goto case "debug"; + case "debug": + // Only if Debug Level Enabled on Jackett + if (Engine.Logger.IsDebugEnabled) + { + logger.Debug(message); + } + break; + case "info": + logger.Info(message); + break; + case "error": + logger.Error(message); + break; + } + } + } + + /// <summary> + /// Validate Config entered by user on Jackett + /// </summary> + private void validateConfig() + { + output("\nValidating Settings ... 
\n"); + + // Check Username Setting + if (string.IsNullOrEmpty(ConfigData.Username.Value)) + { + throw new ExceptionWithConfigData("You must provide a username for this tracker to login !", ConfigData); + } + else + { + output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value.ToString()); + } + + // Check Password Setting + if (string.IsNullOrEmpty(ConfigData.Password.Value)) + { + throw new ExceptionWithConfigData("You must provide a password with your username for this tracker to login !", ConfigData); + } + else + { + output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value.ToString()); + } + + // Check Max Page Setting + if (!string.IsNullOrEmpty(ConfigData.Pages.Value)) + { + try + { + output("Validated Setting -- Max Pages => " + Convert.ToInt32(ConfigData.Pages.Value)); + } + catch (Exception) + { + throw new ExceptionWithConfigData("Please enter a numeric maximum number of pages to crawl !", ConfigData); + } + } + else + { + throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData); + } + + // Check Latency Setting + if (ConfigData.Latency.Value) + { + output("\nValidated Setting -- Latency Simulation enabled"); + + // Check Latency Start Setting + if (!string.IsNullOrEmpty(ConfigData.LatencyStart.Value)) + { + try + { + output("Validated Setting -- Latency Start => " + Convert.ToInt32(ConfigData.LatencyStart.Value)); + } + catch (Exception) + { + throw new ExceptionWithConfigData("Please enter a numeric latency start in ms !", ConfigData); + } + } + else + { + throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData); + } + + // Check Latency End Setting + if (!string.IsNullOrEmpty(ConfigData.LatencyEnd.Value)) + { + try + { + output("Validated Setting -- Latency End => " + Convert.ToInt32(ConfigData.LatencyEnd.Value)); + } + catch (Exception) + { + throw new ExceptionWithConfigData("Please enter a numeric latency end in ms !", ConfigData); + } + } + else + { + throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData); + } + } + + // Check Browser Setting + if (ConfigData.Browser.Value) + { + output("\nValidated Setting -- Browser Simulation enabled"); + + // Check ACCEPT header Setting + if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value)) + { + throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData); + } + else + { + output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value.ToString()); + } + + // Check ACCEPT-LANG header Setting + if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value)) + { + throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData); + } + else + { + output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value.ToString()); + } + + // Check USER-AGENT header Setting + if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value)) + { + throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData); + } + else + { + output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value.ToString()); + } + } + + // Check Dev Cache Settings + if (ConfigData.HardDriveCache.Value == true) + { + output("\nValidated Setting -- DEV Hard Drive Cache enabled"); + + // Check if Dev Mode enabled ! 
+ if (!ConfigData.DevMode.Value) + { + throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData); + } + + // Check Cache Keep Time Setting + if (!string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value)) + { + try + { + output("Validated Setting -- Cache Keep Time (ms) => " + Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value)); + } + catch (Exception) + { + throw new ExceptionWithConfigData("Please enter a numeric hard drive keep time in ms !", ConfigData); + } + } + else + { + throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData); + } + } + else + { + // Delete cache if previously existed + cleanCacheStorage(true); + } + } + } +} \ No newline at end of file diff --git a/src/Jackett/Jackett.csproj b/src/Jackett/Jackett.csproj index e7ce95d46d3693944ce2a4795bab000cc8f5f901..3be05558ae63461812abaa059459943ab3232b0f 100644 --- a/src/Jackett/Jackett.csproj +++ b/src/Jackett/Jackett.csproj @@ -195,6 +195,7 @@ <Compile Include="Indexers\Demonoid.cs" /> <Compile Include="Indexers\BroadcastTheNet.cs" /> <Compile Include="Indexers\DanishBits.cs" /> + <Compile Include="Indexers\Abnormal.cs" /> <Compile Include="Indexers\Fnt.cs" /> <Compile Include="Indexers\GFTracker.cs" /> <Compile Include="Indexers\RevolutionTT.cs" /> @@ -214,6 +215,7 @@ <Compile Include="Models\GitHub\Asset.cs" /> <Compile Include="Models\GitHub\Release.cs" /> <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataBlueTigers.cs" /> + <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataAbnormal.cs" /> <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataFnt.cs" /> <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataFrenchADN.cs" /> <Compile Include="Models\IndexerConfig\Bespoke\ConfigurationDataWiHD.cs" /> @@ -576,6 +578,9 @@ <Content Include="Content\logos\frenchadn.png"> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </Content> + <Content Include="Content\logos\abnormal.png"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </Content> <Content Include="Content\logos\wihd.png"> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </Content> diff --git a/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataAbnormal.cs b/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataAbnormal.cs new file mode 100644 index 0000000000000000000000000000000000000000..13a9b7bdf340775135f956a9d3a756d82e8c9cd6 --- /dev/null +++ b/src/Jackett/Models/IndexerConfig/Bespoke/ConfigurationDataAbnormal.cs @@ -0,0 +1,57 @@ +namespace Jackett.Models.IndexerConfig.Bespoke +{ + class ConfigurationDataAbnormal : ConfigurationData + { + public HiddenItem AuthKey { get; set; } + public HiddenItem TorrentPass { get; set; } + public DisplayItem CredentialsWarning { get; private set; } + public StringItem Username { get; private set; } + public StringItem Password { get; private set; } + public DisplayItem PagesWarning { get; private set; } + public StringItem Pages { get; private set; } + public DisplayItem SecurityWarning { get; private set; } + public BoolItem Latency { get; private set; } + public BoolItem Browser { get; private set; } + public DisplayItem LatencyWarning { get; private set; } + public StringItem LatencyStart { get; private set; } + public StringItem LatencyEnd { get; private set; } + public DisplayItem HeadersWarning { get; private set; } + public StringItem HeaderAccept { get; private set; } + public StringItem 
HeaderAcceptLang { get; private set; } + public BoolItem HeaderDNT { get; private set; } + public BoolItem HeaderUpgradeInsecure { get; private set; } + public StringItem HeaderUserAgent { get; private set; } + public DisplayItem DevWarning { get; private set; } + public BoolItem DevMode { get; private set; } + public BoolItem HardDriveCache { get; private set; } + public StringItem HardDriveCacheKeepTime { get; private set; } + + public ConfigurationDataAbnormal() + : base() + { + AuthKey = new HiddenItem { Name = "AuthKey", Value = "" }; + TorrentPass = new HiddenItem { Name = "TorrentPass", Value = "" }; + CredentialsWarning = new DisplayItem("<b>Credentials Configuration</b> (<i>Private Tracker</i>),<br /><br /> <ul><li><b>Username</b> is your account name on this tracker.</li><li><b>Password</b> is the password associated with your account name.</li></ul>") { Name = "Credentials" }; + Username = new StringItem { Name = "Username (Required)", Value = "" }; + Password = new StringItem { Name = "Password (Required)", Value = "" }; + PagesWarning = new DisplayItem("<b>Preferences Configuration</b> (<i>Tweak your search settings</i>),<br /><br /> <ul><li><b>Max Pages to Process</b> lets you specify how many pages (max) Jackett can process when doing a search. Setting a value <b>higher than 4 is dangerous</b> for your account! (<b>too many requests to the tracker <u>will look suspicious</u></b>).</li></ul>") { Name = "Preferences" }; + Pages = new StringItem { Name = "Max Pages to Process (Required)", Value = "4" }; + SecurityWarning = new DisplayItem("<b>Security Configuration</b> (<i>Read this area carefully !</i>),<br /><br /> <ul><li><b>Latency Simulation</b> will simulate human browsing by pausing Jackett for a random time between each request, to mimic real browsing.</li><li><b>Browser Simulation</b> will simulate a real browser by injecting additional headers when sending requests to the tracker.</li></ul>") { Name = "Security" }; + Latency = new BoolItem() { Name = "Latency Simulation (Optional)", Value = false }; + Browser = new BoolItem() { Name = "Browser Simulation (Optional)", Value = true }; + LatencyWarning = new DisplayItem("<b>Latency Configuration</b> (<i>Required if latency simulation enabled</i>),<br /><br/> <ul><li>By filling this range, <b>Jackett will pause for a random time</b> <u>between requests</u> to the tracker <u>to simulate a real browser</u>.</li><li>Milliseconds <b>only</b></li></ul>") { Name = "Simulate Latency" }; + LatencyStart = new StringItem { Name = "Minimum Latency (ms)", Value = "1589" }; + LatencyEnd = new StringItem { Name = "Maximum Latency (ms)", Value = "3674" }; + HeadersWarning = new DisplayItem("<b>Browser Headers Configuration</b> (<i>Required if browser simulation enabled</i>),<br /><br /> <ul><li>By filling these fields, <b>Jackett will inject headers</b> with your values <u>to simulate a real browser</u>.</li><li>You can get <b>your browser values</b> here: <a href='https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending' target='blank'>www.whatismybrowser.com</a></li></ul><br /><i><b>Note that</b> some headers are not needed because they are injected automatically by this provider, such as Accept-Encoding, Connection, Host or X-Requested-With</i>") { Name = "Injecting headers" }; + HeaderAccept = new StringItem { Name = "Accept", Value = "" }; + HeaderAcceptLang = new StringItem { Name = "Accept-Language", Value = "" }; + HeaderDNT = new BoolItem { Name = "DNT", Value = false }; + HeaderUpgradeInsecure = new BoolItem { Name = "Upgrade-Insecure-Requests", Value = false }; + HeaderUserAgent = new StringItem { Name = "User-Agent", Value = "" }; + DevWarning = new DisplayItem("<b>Development Facility</b> (<i>For Developers ONLY</i>),<br /><br /> <ul><li>By enabling development mode, <b>Jackett will bypass its cache</b> and will <u>output debug messages to the console</u> instead of its log file.</li><li>By enabling Hard Drive Cache, <b>this provider</b> will <u>save each query answer from the tracker</u> in a temp directory, which drastically reduces HTTP requests while developing the parsing step of a provider. So, <b>Jackett will look for a cached query answer on the hard drive before querying the tracker!</b> <i>DEV MODE must be enabled to use it!</i></li></ul>") { Name = "Development" }; + DevMode = new BoolItem { Name = "Enable DEV MODE (Developers ONLY)", Value = false }; + HardDriveCache = new BoolItem { Name = "Enable HARD DRIVE CACHE (Developers ONLY)", Value = false }; + HardDriveCacheKeepTime = new StringItem { Name = "Keep Cached files for (ms)", Value = "300000" }; + } + } +}
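Reviewer note, outside the diff: the new indexer's agoToDate() rebuilds a publish date from the tracker's French relative times ("Il y a 2 jours, 3 heures"). Below is a minimal, self-contained sketch of that idea, useful for sanity-checking the conversion outside Jackett; the class name, unit table and regex here are illustrative assumptions, not code from this PR.

```csharp
// Standalone sketch (assumed names, not part of the PR): subtract each French
// "Il y a ..." component from a reference date, as the indexer's agoToDate() does.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;

static class RelativeDateSketch
{
    // French unit keyword -> how to subtract N of that unit from a date.
    static readonly Dictionary<string, Func<DateTime, int, DateTime>> Units =
        new Dictionary<string, Func<DateTime, int, DateTime>>
        {
            { "année",   (d, n) => d.AddYears(-n) },
            { "mois",    (d, n) => d.AddMonths(-n) },
            { "semaine", (d, n) => d.AddDays(-7 * n) },
            { "jour",    (d, n) => d.AddDays(-n) },
            { "heure",   (d, n) => d.AddHours(-n) },
            { "min",     (d, n) => d.AddMinutes(-n) },
            { "seconde", (d, n) => d.AddSeconds(-n) }
        };

    public static DateTime Parse(string agoText, DateTime now)
    {
        var date = now;
        // "Il y a 2 jours, 3 heures" -> ["2 jours", "3 heures"]
        var parts = agoText.Replace("Il y a", "").Split(',')
                           .Select(s => s.Trim())
                           .Where(s => s.Length > 0);
        foreach (var part in parts)
        {
            var match = Regex.Match(part, @"\d+");
            if (!match.Success) continue;                           // no number, skip the chunk
            int value = int.Parse(match.Value);
            var unit = Units.Keys.OrderByDescending(k => k.Length)  // longest keyword first
                                 .FirstOrDefault(k => part.Contains(k));
            if (unit != null)
                date = Units[unit](date, value);
        }
        return date;
    }

    static void Main()
    {
        var reference = new DateTime(2016, 1, 10, 12, 0, 0);
        // Expected: 2016-01-08 09:00:00
        Console.WriteLine(Parse("Il y a 2 jours, 3 heures", reference));
    }
}
```

Feeding a fixed reference date instead of DateTime.Now keeps the expected output deterministic; the indexer itself always anchors on DateTime.Now, so its results drift with the moment the page is parsed.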