@@ -1,5 +1,4 @@
-using Microsoft.Extensions.Caching.Memory;
-using Microsoft.Win32;
+using Microsoft.Win32;
 using SharpCompress.Archives;
 using SharpCompress.Archives.Rar;
 using SharpCompress.Archives.Zip;
@@ -14,6 +13,7 @@ using System.Linq;
 using System.Net.Http;
 using System.Text;
 using System.Threading.Tasks;
+using System.Web;

 namespace Masuit.Tools.Files
 {
@@ -23,8 +23,6 @@ namespace Masuit.Tools.Files
     public class SevenZipCompressor : ISevenZipCompressor
     {
         private readonly HttpClient _httpClient;
-        private static readonly MemoryCache _memoryCache = new MemoryCache(new MemoryCacheOptions());
-        internal static bool EnableCache { get; set; }

         /// <summary>
         ///
@@ -91,6 +89,7 @@ namespace Masuit.Tools.Files
             {
                 dir = Path.GetDirectoryName(rar);
             }
+
             using (var archive = RarArchive.Open(rar))
             {
                 var entries = ignoreEmptyDir ? archive.Entries.Where(entry => !entry.IsDirectory) : archive.Entries;
@@ -117,6 +116,7 @@ namespace Masuit.Tools.Files
             {
                 dir = Path.GetDirectoryName(compressedFile);
             }
+
             using (Stream stream = File.OpenRead(compressedFile))
             {
                 using (var reader = ReaderFactory.Open(stream))
@@ -157,43 +157,51 @@ namespace Masuit.Tools.Files
         {
             var archive = ZipArchive.Create();
             var dic = GetFileEntryMaps(files);
-            var remoteFiles = files.Where(s => s.StartsWith("http")).ToList();
+            var remoteUrls = files.Distinct().Where(s => s.StartsWith("http")).Select(s =>
+            {
+                try
+                {
+                    return new Uri(s);
+                }
+                catch (UriFormatException)
+                {
+                    return null;
+                }
+            }).Where(u => u != null).ToList();
             foreach (var fileEntry in dic)
             {
                 archive.AddEntry(Path.Combine(rootdir, fileEntry.Value), fileEntry.Key);
             }
-            if (remoteFiles.Any())
+
+            if (remoteUrls.Any())
             {
-                //var paths = remoteFiles.Select(s => new Uri(s).PathAndQuery).ToList();
-                //string pathname = new string(paths.First().Substring(0, paths.Min(s => s.Length)).TakeWhile((c, i) => paths.All(s => s[i] == c)).ToArray());
-                //Dictionary<string, string> pathDic = paths.ToDictionary(s => s, s => s.Substring(pathname.Length));
-                Parallel.ForEach(remoteFiles, url =>
+                //var dicList = remoteUrls.GroupBy(u => u.Authority).Select(g =>
+                //{
+                //    if (g.Count() > 1)
+                //    {
+                //        string pathname = new string(g.First().AbsolutePath.Substring(0, g.Min(s => s.AbsolutePath.Length)).TakeWhile((c, i) => g.All(s => s.AbsolutePath[i] == c)).ToArray());
+                //        return g.ToDictionary(s => s, s => HttpUtility.UrlDecode(s.AbsolutePath.Substring(pathname.Length)));
+                //    }
+                //    return g.ToDictionary(s => s, s => Path.GetFileName(HttpUtility.UrlDecode(s.AbsolutePath)));
+                //}).SelectMany(d => d).ToDictionary(x => x.Key, x => x.Value);
+                Parallel.ForEach(remoteUrls, url =>
                 {
-                    if (_memoryCache.TryGetValue(url, out Stream ms))
+                    _httpClient.GetAsync(url).ContinueWith(async t =>
                     {
-                        archive.AddEntry(Path.Combine(rootdir, Path.GetFileName(new Uri(url).AbsolutePath.Trim('/'))), ms);
-                    }
-                    else
-                    {
-                        _httpClient.GetAsync(url).ContinueWith(async t =>
+                        if (t.IsCompleted)
                         {
-                            if (t.IsCompleted)
+                            var res = await t;
+                            if (res.IsSuccessStatusCode)
                             {
-                                var res = await t;
-                                if (res.IsSuccessStatusCode)
-                                {
-                                    Stream stream = await res.Content.ReadAsStreamAsync();
-                                    archive.AddEntry(Path.Combine(rootdir, Path.GetFileName(new Uri(url).AbsolutePath.Trim('/'))), stream);
-                                    if (EnableCache)
-                                    {
-                                        _memoryCache.Set(url, stream, TimeSpan.FromMinutes(10));
-                                    }
-                                }
+                                Stream stream = await res.Content.ReadAsStreamAsync();
+                                //archive.AddEntry(Path.Combine(rootdir, pathDic[new Uri(url).AbsolutePath.Trim('/')]), stream);
+                                archive.AddEntry(Path.Combine(rootdir, Path.GetFileName(HttpUtility.UrlDecode(url.AbsolutePath))), stream);
                             }
-                        }).Wait();
-                    }
+                        }
+                    }).Wait();
                 });
             }
+
             return archive;
         }

@@ -236,11 +244,13 @@ namespace Masuit.Tools.Files
             {
                 return new Dictionary<string, string>();
             }
+
             string dirname = new string(fileList.First().Substring(0, fileList.Min(s => s.Length)).TakeWhile((c, i) => fileList.All(s => s[i] == c)).ToArray());
             Dictionary<string, string> dic = fileList.ToDictionary(s => s, s => s.Substring(dirname.Length));
             return dic;
         }
     }
+
     /// <summary>
     /// SharpZip
     /// </summary>
@@ -284,7 +294,6 @@ namespace Masuit.Tools.Files
         public static bool UnpackFiles(string file, string dir)
         {
             throw new NotImplementedException("该方法已过时,请使用SevenZipCompressor.Decompress方法替代");
-
         }

         /// <summary>
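A side note on the entry-naming logic this diff touches: GetFileEntryMaps (and the commented-out remote-URL grouping) name archive entries by stripping the longest common prefix shared by all paths. Below is a minimal self-contained sketch of that technique; the class name, method name, and sample paths are illustrative and not part of the library.

using System;
using System.Collections.Generic;
using System.Linq;

public static class EntryMapSketch
{
    // Same idea as GetFileEntryMaps: treat the longest common leading substring
    // of all paths as the root directory and key each file to its relative path.
    public static Dictionary<string, string> Map(List<string> fileList)
    {
        if (fileList.Count == 0)
        {
            return new Dictionary<string, string>();
        }

        string prefix = new string(fileList.First()
            .Substring(0, fileList.Min(s => s.Length))
            .TakeWhile((c, i) => fileList.All(s => s[i] == c))
            .ToArray());

        return fileList.ToDictionary(s => s, s => s.Substring(prefix.Length));
    }

    public static void Main()
    {
        var files = new List<string>
        {
            @"D:\data\photos\summer\a.jpg",
            @"D:\data\photos\winter\b.jpg"
        };

        // Prints:
        // D:\data\photos\summer\a.jpg -> summer\a.jpg
        // D:\data\photos\winter\b.jpg -> winter\b.jpg
        foreach (var kv in Map(files))
        {
            Console.WriteLine($"{kv.Key} -> {kv.Value}");
        }
    }
}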