add tools

Kieran 2018-11-30 19:33:40 +08:00
parent f57e46c779
commit e9165a2276
10 changed files with 816 additions and 15 deletions

View File

@ -1,14 +1,15 @@
<?php
include_once("init.php");
StaticRedis::Connect();
Config::LoadConfig(array("upload_folder"));
var_dump($_SERVER);
if(StaticRedis::Connect()) {
echo "Connected to redis..\n";
$fs = new FileStore(Config::$Instance->upload_folder, $_SERVER["cron_root"]);
Config::LoadConfig(array("upload_folder"));
echo "Loading stats for: " . $fs->GetUploadDirAbsolute() . "\n";
//echo "\n\t" . implode("\n\t", $fs->ListFiles()) . "\n";
Stats::Collect($fs);
if(StaticRedis::$IsConnectedToSlave == False) {
echo "Runing master node tasks..\n";
$fs = new FileStore(Config::$Instance->upload_folder, $_SERVER["cron_root"]);
Stats::Collect($fs);
}
}
?>

View File

@ -1,15 +1,16 @@
<?php
class StaticRedis {
public static $Instance = NULL;
public static $MasterInstance = NULL;
public static $Instance = null;
public static $MasterInstance = null;
public static $IsConnectedToSlave = false;
public static function ReadOp() : object {
return self::$Instance;
}
public static function WriteOp() : object {
if(self::$MasterInstance != NULL){
if(self::$MasterInstance != null){
return self::$MasterInstance;
} else {
return self::$Instance;
@ -20,8 +21,9 @@
self::$Instance = new Redis();
$con = self::$Instance->pconnect(REDIS_CONFIG);
if($con){
$rep = self::$Instance->info("REPLICATION");
$rep = self::$Instance->info();
if($rep["role"] == "slave"){
self::$IsConnectedToSlave = true;
self::$MasterInstance = new Redis();
$mcon = self::$MasterInstance->pconnect($rep["master_host"], $rep["master_port"]);
return $con && $mcon;
@ -29,7 +31,5 @@
}
return $con;
}
}
?>

View File

@ -49,7 +49,10 @@
"url" => (isset($_SERVER['HTTPS']) ? "https" : "http") . "://$_SERVER[HTTP_HOST]$_SERVER[REQUEST_URI]",
"cip" => USER_IP,
"ua" => isset($_SERVER["HTTP_USER_AGENT"]) ? $_SERVER["HTTP_USER_AGENT"] : "",
"urlref" => isset($_SERVER["HTTP_REFERER"]) ? $_SERVER["HTTP_REFERER"] : ""
"urlref" => isset($_SERVER["HTTP_REFERER"]) ? $_SERVER["HTTP_REFERER"] : "",
"h" => date("H"),
"m" => date("i"),
"s" => date("s")
));
//this should be sent to the slave node if we are connected on a slave

View File

@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.27703.2042
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "void_util", "void_util\void_util.csproj", "{6AE35BCD-015B-4316-8783-7AF6D75DE7E9}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{6AE35BCD-015B-4316-8783-7AF6D75DE7E9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{6AE35BCD-015B-4316-8783-7AF6D75DE7E9}.Debug|Any CPU.Build.0 = Debug|Any CPU
{6AE35BCD-015B-4316-8783-7AF6D75DE7E9}.Release|Any CPU.ActiveCfg = Release|Any CPU
{6AE35BCD-015B-4316-8783-7AF6D75DE7E9}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {90C997D3-2657-426B-A247-555831779BD3}
EndGlobalSection
EndGlobal

View File

@ -0,0 +1,212 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace void_util
{
public class ChunkStream : Stream, IDisposable
{
private static byte[] CRLF = new byte[] { 13, 10 };
private bool LeaveOpen { get; set; } = false;
private byte[] InternalBuffer { get; set; }
private Stream BaseStream { get; set; }
private int ReadingChunkSize { get; set; } = -1;
private int ReadOffset { get; set; } = 0;
private int LoadOffset { get; set; }
private int Loaded { get; set; }
public override bool CanRead => BaseStream.CanRead;
public override bool CanSeek => false;
public override bool CanWrite => BaseStream.CanWrite;
public override long Length => Loaded - ReadOffset;
public override long Position { get => 0; set { ; } }
public override void Flush()
{
BaseStream.Flush();
}
/// <summary>
/// Adds data to the start of the read buffer
/// </summary>
/// <param name="buffer"></param>
/// <param name="offset"></param>
/// <param name="count"></param>
public void PreLoadBuffer(byte[] buffer, int offset, int count)
{
if(count > InternalBuffer.Length)
{
throw new Exception("Cant preload data larger than our buffer");
}
Array.Copy(buffer, offset, InternalBuffer, 0, count);
Loaded += count;
LoadOffset += count;
}
private async Task<bool> BufferSomeAsync(CancellationToken cancellationToken)
{
if(Loaded == InternalBuffer.Length)
{
return true;
}
var rlen = await BaseStream.ReadAsync(InternalBuffer, LoadOffset, InternalBuffer.Length - Loaded, cancellationToken);
if(rlen != 0)
{
Loaded += rlen;
LoadOffset += rlen;
return true;
}
else
{
return false;
}
}
private bool BufferSome()
{
if (Loaded == InternalBuffer.Length)
{
return true;
}
var rlen = BaseStream.Read(InternalBuffer, LoadOffset, InternalBuffer.Length - Loaded);
if (rlen != 0)
{
Loaded += rlen;
LoadOffset += rlen; //track the load position like the async path does, so later reads append after the buffered data
return true;
}
else
{
return false;
}
}
private int ParseChunks(byte[] data, int offset, int count)
{
//no chunk in progress: read the next chunk-size line ("<hex length>\r\n") before copying any data
if (ReadingChunkSize == -1)
{
var clen_end = InternalBuffer.IndexOf(CRLF, ReadOffset, Loaded - ReadOffset);
var hex_len = Encoding.ASCII.GetString(InternalBuffer, ReadOffset, clen_end - ReadOffset + 2);
ReadingChunkSize = Convert.ToInt32(hex_len.Trim(), 16);
ReadOffset += 2 + clen_end - ReadOffset;
}
var sending_data = Math.Min(count, ReadingChunkSize <= Loaded - ReadOffset ? ReadingChunkSize : Loaded);
Array.Copy(InternalBuffer, ReadOffset, data, offset, sending_data);
ReadOffset += sending_data;
//did we send all of the chunk this time? if so, skip the trailing CRLF and reset the chunk read size
if (sending_data == ReadingChunkSize)
{
ReadingChunkSize = -1;
ReadOffset += 2;
}
//if we moved all our buffer then reset read to start of buffer
if(ReadOffset == Loaded)
{
LoadOffset = 0;
ReadOffset = 0;
Loaded = 0;
}
//do we still have some data left on this chunk
//adjust the chunk size so we will copy the rest next time
if(sending_data < ReadingChunkSize)
{
ReadingChunkSize -= sending_data;
}
return sending_data;
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if(await BufferSomeAsync(cancellationToken) || Length > 0)
{
return ParseChunks(buffer, offset, count);
}
else
{
return 0;
}
}
public override int Read(byte[] buffer, int offset, int count)
{
if (BufferSome() || Length > 0)
{
return ParseChunks(buffer, offset, count);
}
else
{
return 0;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
var chunk_len = Encoding.ASCII.GetBytes($"{count.ToString("X")}\r\n");
await BaseStream.WriteAsync(chunk_len, 0, chunk_len.Length, cancellationToken);
if (count > 0)
{
await BaseStream.WriteAsync(buffer, offset, count, cancellationToken);
}
await BaseStream.WriteAsync(CRLF, 0, CRLF.Length, cancellationToken);
}
public override void Write(byte[] buffer, int offset, int count)
{
var chunk_len = Encoding.ASCII.GetBytes($"{count.ToString("X")}\r\n");
BaseStream.Write(chunk_len, 0, chunk_len.Length);
if (count > 0)
{
BaseStream.Write(buffer, offset, count);
}
BaseStream.Write(CRLF, 0, CRLF.Length);
}
protected override void Dispose(bool disposing)
{
if (!LeaveOpen)
{
BaseStream.Dispose();
}
base.Dispose(disposing);
}
public override Task FlushAsync(CancellationToken cancellationToken)
{
return BaseStream.FlushAsync(cancellationToken);
}
public ChunkStream(Stream stream, int bufferSize = 16 * 1024, bool leaveOpen = false)
{
BaseStream = stream;
InternalBuffer = new byte[bufferSize];
LeaveOpen = leaveOpen;
}
}
}
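
For reference, a minimal usage sketch (not part of this commit; the ChunkStreamSketch class is hypothetical): each write through ChunkStream comes out as one HTTP chunk, "<hex length>\r\n<data>\r\n", and a zero-length write emits the terminating "0\r\n\r\n".

using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using void_util;

static class ChunkStreamSketch
{
    public static async Task RunAsync()
    {
        using (var ms = new MemoryStream())
        {
            // leaveOpen so the MemoryStream survives disposing the ChunkStream
            using (var cs = new ChunkStream(ms, 16 * 1024, true))
            {
                var payload = Encoding.ASCII.GetBytes("hello");
                await cs.WriteAsync(payload, 0, payload.Length); // writes "5\r\nhello\r\n"
                await cs.WriteAsync(new byte[0], 0, 0);          // writes the terminating "0\r\n\r\n"
                await cs.FlushAsync();
            }
            Console.WriteLine(Encoding.ASCII.GetString(ms.ToArray()));
        }
    }
}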

View File

@ -0,0 +1,57 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
namespace void_util
{
public static class Ext
{
public static async Task CopyToAsync(this Stream in_stream, Stream out_stream, Action<long> progress)
{
long total = 0;
var buff = new byte[1024];
int rlen = 0;
while((rlen = await in_stream.ReadAsync(buff, 0, buff.Length)) != 0)
{
await out_stream.WriteAsync(buff, 0, rlen);
total += rlen;
progress(total);
}
}
public static int IndexOf(this byte[] data, byte[] seq, int offset = 0, int length = -1)
{
if(length == -1)
{
length = data.Length;
}
if(offset + length > data.Length)
{
throw new IndexOutOfRangeException();
}
for(var x = offset; x < offset + length; x++)
{
bool checkpos = true;
for (var y = 0;y < seq.Length; y++)
{
if(data[x+y] != seq[y])
{
checkpos = false;
break;
}
}
if (checkpos)
{
return x;
}
}
return -1;
}
}
}
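
A small illustrative sketch (hypothetical ExtSketch class, assuming only the two extension methods above): IndexOf locates a byte sequence inside a buffer, and the CopyToAsync overload reports the running byte total to a callback.

using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using void_util;

static class ExtSketch
{
    public static async Task RunAsync()
    {
        var rsp = Encoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\nServer: test\r\n\r\nbody");
        var crlfcrlf = new byte[] { 13, 10, 13, 10 };
        // index of the blank line that ends the HTTP headers
        Console.WriteLine(rsp.IndexOf(crlfcrlf));

        using (var src = new MemoryStream(new byte[8192]))
        using (var dst = new MemoryStream())
        {
            // the lambda binds to the Action<long> overload, so it receives the cumulative byte count
            await src.CopyToAsync(dst, total => Console.Write($"\rcopied {total} bytes"));
        }
        Console.WriteLine();
    }
}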

View File

@ -0,0 +1,416 @@
using Newtonsoft.Json;
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Security;
using System.Net.Sockets;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace void_util
{
class Program
{
public static string BaseHostname => "v3.void.cat";
public static string UserAgent => "VoidUtil/1.0";
static void PrintHelp()
{
Console.WriteLine($@"
Usage: void_util [MODE] [FILE|URL]
Modes:
upload Uploads a file to {BaseHostname}
download Downloads a file and decrypts it
pack Packs a file into VBF format (can be useful for uploading with curl or another program)
");
Console.ReadKey();
}
static Task Main(string[] args)
{
if (args.Length > 1)
{
switch (args[0])
{
case "upload":
{
using (var rng = new RNGCryptoServiceProvider())
{
var key = new byte[16];
var iv = new byte[16];
rng.GetBytes(key);
rng.GetBytes(iv);
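// 16-byte AES key and IV, generated client-side; they are shown as hex and become part of the share-link fragment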
return UploadFileAsync(args[1], key, iv);
}
}
case "download":
{
return DownloadFileAsync(args[1]);
}
case "pack":
{
Console.WriteLine("Mode not implemented yet, please check github for updates");
break;
}
default:
{
Console.WriteLine($"Unknown mode: {args[0]}");
PrintHelp();
break;
}
}
return Task.CompletedTask;
}
else
{
PrintHelp();
return Task.CompletedTask;
}
}
public static byte[] FromHex(string hex)
{
return Enumerable.Range(0, hex.Length)
.Where(x => x % 2 == 0)
.Select(x => Convert.ToByte(hex.Substring(x, 2), 16))
.ToArray();
}
private static string ToHex(byte[] data)
{
return BitConverter.ToString(data).Replace("-", string.Empty).ToLower();
}
private static async Task UploadFileAsync(string filename, byte[] key, byte[] iv)
{
if (File.Exists(filename))
{
var file_info = new FileInfo(filename);
var site_info = await VoidApi.GetUploadHostAsync();
Console.WriteLine($"Starting upload for: {file_info.Name} => {site_info.upload_host}\nUsing key: {ToHex(key)} and IV: {ToHex(iv)}");
var file_length = file_info.Length;
var header = JsonConvert.SerializeObject(new FileHeader()
{
name = file_info.Name,
mime = "", // idk what to do with this haha, its not really important anyway since we dont preview in browser
len = (ulong)file_length
});
Console.WriteLine($"Using header: {header}");
// unfortunately we need to use a raw socket here because HttpWebRequest just buffers forever
//sad.. no good for large uploads
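// plan: resolve the host ourselves, open a TCP socket, wrap it in SslStream,
// and stream the body with Transfer-Encoding: chunked via ChunkStream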
var hosts = await Dns.GetHostAddressesAsync(site_info.upload_host);
if (hosts.Length > 0)
{
var sock = new Socket(SocketType.Stream, ProtocolType.Tcp);
var tcs = new TaskCompletionSource<bool>();
var sae = new SocketAsyncEventArgs()
{
RemoteEndPoint = new IPEndPoint(hosts[0], 443)
};
sae.Completed += (s, e) =>
{
tcs.SetResult(true);
};
if (sock.ConnectAsync(sae))
{
await tcs.Task;
}
using (var ssl_stream = new SslStream(new NetworkStream(sock)))
{
await ssl_stream.AuthenticateAsClientAsync(site_info.upload_host);
var http_header = $"POST /upload HTTP/1.1\r\nHost: {site_info.upload_host}\r\nConnection: close\r\nContent-Type: application/octet-stream\r\nTransfer-Encoding: chunked\r\nUser-Agent: {UserAgent}\r\nTrailer: \r\nAccept-Encoding: 0\r\n\r\n";
var http_header_bytes = Encoding.UTF8.GetBytes(http_header);
await ssl_stream.WriteAsync(http_header_bytes, 0, http_header_bytes.Length);
await ssl_stream.FlushAsync();
using (var cs = new ChunkStream(ssl_stream, 16384, true))
{
//send the file data
using (var fs = file_info.OpenRead())
{
byte[] hash;
//create hmac
Console.WriteLine("Hashing...");
using (var hmac = HMAC.Create("HMACSHA256"))
{
hmac.Key = key;
hash = hmac.ComputeHash(fs);
}
Console.WriteLine($"Hash is {ToHex(hash)}");
fs.Seek(0, SeekOrigin.Begin);
//write header to request stream
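// VBF prefix layout (37 bytes): [0] version, [1..32] HMAC-SHA256 of the plaintext file, [33..36] upload timestamp (unix seconds)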
var vbf_buf = new byte[37];
vbf_buf[0] = 1;
Array.Copy(hash, 0, vbf_buf, 1, hash.Length);
var ts_buf = BitConverter.GetBytes((UInt32)DateTimeOffset.Now.ToUnixTimeSeconds());
Array.Copy(ts_buf, 0, vbf_buf, 33, ts_buf.Length);
await cs.WriteAsync(vbf_buf, 0, vbf_buf.Length);
Console.WriteLine("Encrypting and Uploading...");
using (var aes = new AesManaged())
{
aes.Padding = PaddingMode.PKCS7;
aes.Mode = CipherMode.CBC;
using (var ds = aes.CreateEncryptor(key, iv))
{
var buf = new byte[ds.InputBlockSize * 1000];
var out_buf = new byte[ds.OutputBlockSize * 1000];
var header_bytes = Encoding.UTF8.GetBytes(header);
var hlb = BitConverter.GetBytes((UInt16)header_bytes.Length);
Array.Copy(hlb, buf, hlb.Length);
Array.Copy(header_bytes, 0, buf, 2, header_bytes.Length);
var init_offset = hlb.Length + header_bytes.Length;
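// encrypted payload plaintext: [0..1] header length (UInt16), then the JSON header, then the raw file bytes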
long frlen = 0;
long tlen = 0;
while ((frlen = await fs.ReadAsync(buf, init_offset, buf.Length - init_offset)) > 0)
{
var actual_rlen = (int)(init_offset + frlen);
if (actual_rlen % ds.InputBlockSize != 0)
{
var last_block = ds.TransformFinalBlock(buf, 0, actual_rlen);
await cs.WriteAsync(last_block, 0, last_block.Length);
}
else
{
var clen = ds.TransformBlock(buf, 0, actual_rlen, out_buf, 0);
await cs.WriteAsync(out_buf, 0, clen);
}
//offset should always be 0 after the first block
if (init_offset != 0)
{
init_offset = 0;
}
tlen += frlen;
Console.Write($"\r{(100 * (tlen / (decimal)file_length)).ToString("000.0")}%");
}
}
}
}
//write end chunk
await cs.WriteAsync(new byte[0], 0, 0);
await cs.FlushAsync();
}
//hand-parsing the HTTP response headers off the socket.. why am i doing this to myself
var crlf = new byte[] { 13, 10, 13, 10 };
var sb_headers = new StringBuilder();
var rlen = 0;
var header_buff = new byte[256];
var header_end = 0;
while ((rlen = await ssl_stream.ReadAsync(header_buff, 0, header_buff.Length)) != 0)
{
if ((header_end = header_buff.IndexOf(crlf)) != -1)
{
sb_headers.Append(Encoding.UTF8.GetString(header_buff, 0, header_end + 4));
break;
}
else
{
sb_headers.Append(Encoding.UTF8.GetString(header_buff, 0, rlen));
}
}
var header_dict = sb_headers.ToString().Split('\n').Select(a =>
{
var i = a.IndexOf(":");
return i == -1 ? null : new string[] { a.Substring(0, i), a.Substring(i + 2) };
}).Where(a => a != null).ToDictionary(a => a[0].Trim(), b => b.Length > 1 ? b[1].Trim() : null);
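// e.g. the line "Transfer-Encoding: chunked" ends up as header_dict["Transfer-Encoding"] == "chunked"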
if (header_dict.ContainsKey("Content-Length"))
{
//we sent with chunked, not sure why this happened
throw new Exception("Expected chunked response, got non-chunked response..");
}
else
{
if (header_dict.ContainsKey("Transfer-Encoding") && header_dict["Transfer-Encoding"] == "chunked")
{
using (var msb = new MemoryStream())
{
using (var cr = new ChunkStream(ssl_stream))
{
cr.PreLoadBuffer(header_buff, header_end + 4, rlen - header_end - 4);
await cr.CopyToAsync(msb);
}
var json_data = Encoding.UTF8.GetString(msb.ToArray());
var rsp = JsonConvert.DeserializeObject<UploadResponse>(json_data);
if (rsp != null)
{
if (rsp.status == 200)
{
Console.WriteLine($"\nUpload complete!\nUrl: https://{BaseHostname}/#{rsp.id}:{ToHex(key)}:{ToHex(iv)}");
}
else
{
Console.WriteLine($"\nUpload error: {rsp.msg}");
}
}
else
{
Console.WriteLine($"\nGot invalid response: {json_data}");
}
}
}
}
}
}
}
else
{
Console.WriteLine("\nError: file not found!");
}
}
private static async Task DownloadFileAsync(string url)
{
var url_base = new Uri(url);
var hash_frag = url_base.Fragment.Substring(1).Split(':');
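// share-link fragment is "#<file id>:<aes key hex>:<iv hex>", as produced by the upload path above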
Console.WriteLine($"Starting download for: {hash_frag[0]}");
var req = (HttpWebRequest)WebRequest.Create($"{url_base.Scheme}://{url_base.Host}/{hash_frag[0]}");
req.UserAgent = UserAgent;
var rsp = await req.GetResponseAsync();
var file_length = rsp.ContentLength;
using (var rsp_stream = rsp.GetResponseStream())
{
var version = rsp_stream.ReadByte();
var hmac = new byte[32];
var ts = new byte[4];
await rsp_stream.ReadAsync(hmac, 0, hmac.Length);
await rsp_stream.ReadAsync(ts, 0, ts.Length);
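// same 37-byte VBF prefix as on upload: 1 version byte, 32-byte HMAC-SHA256, 4-byte unix timestamp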
var tmp_name = Path.GetTempFileName();
string real_name = null;
using (var tmp_file = new FileStream(tmp_name, FileMode.Open, FileAccess.ReadWrite))
{
using (var aes = new AesManaged())
{
aes.Padding = PaddingMode.PKCS7;
aes.Mode = CipherMode.CBC;
using (var ds = aes.CreateDecryptor(FromHex(hash_frag[1]), FromHex(hash_frag[2])))
{
var buf = new byte[ds.InputBlockSize * 1024];
var out_buf = new byte[ds.OutputBlockSize * 1024];
bool first_block = true;
int read_offset = 0;
int last_rlen = 0;
long t_len = 0;
while(true)
{
var rlen = await rsp_stream.ReadAsync(buf, read_offset, buf.Length - read_offset);
//end of stream: decrypt the final block (removes padding)
if (rlen == 0)
{
var last_buf = ds.TransformFinalBlock(buf, 0, last_rlen);
await tmp_file.WriteAsync(last_buf, 0, last_buf.Length);
break;
}
else
{
if((read_offset + rlen) % ds.InputBlockSize != 0)
{
read_offset += rlen;
continue;
}
else
{
rlen += read_offset;
last_rlen = rlen;
read_offset = 0;
}
}
var clen = ds.TransformBlock(buf, 0, rlen, out_buf, 0);
if (first_block)
{
first_block = false;
var hlen = BitConverter.ToUInt16(out_buf, 0);
var header = Encoding.UTF8.GetString(out_buf, 2, hlen);
Console.WriteLine($"Header is: {header}");
var header_obj = JsonConvert.DeserializeObject<FileHeader>(header);
real_name = header_obj.name;
var file_start = 2 + hlen;
await tmp_file.WriteAsync(out_buf, file_start, clen - file_start);
}
else
{
await tmp_file.WriteAsync(out_buf, 0, clen);
}
t_len += rlen;
Console.Write($"\r{(100 * (t_len / (decimal)file_length)).ToString("000.0")}%");
}
}
}
}
//file is downloaded to temp path, move it now
var out_file = Path.Combine(Directory.GetCurrentDirectory(), real_name);
Console.WriteLine($"\nMoving file to {out_file}");
File.Move(tmp_name, out_file);
}
Console.WriteLine("\nDone!");
Console.ReadKey();
}
}
internal class FileHeader
{
public string name { get; set; }
public string mime { get; set; }
public ulong len { get; set; }
}
internal class FileData
{
public FileHeader Header { get; set; }
public string Hmac { get; set; }
public byte Version { get; set; }
public DateTime Uploaded { get; set; }
public byte[] EncryptedPayload { get; set; }
}
internal class UploadResponse
{
public int status { get; set; }
public string msg { get; set; }
public string id { get; set; }
public int[] sync { get; set; }
}
}

View File

@ -0,0 +1,8 @@
{
"profiles": {
"void_download": {
"commandName": "Project",
"commandLineArgs": "upload \"D:\\en_windows_10_consumer_edition_version_1803_updated_sep_2018_x64_dvd_69339216.iso\""
}
}
}

View File

@ -0,0 +1,66 @@
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace void_util
{
public class BasicStats
{
public int Files { get; set; }
public long Size { get; set; }
public long Transfer_24h { get; set; }
}
public class SiteInfo
{
public long max_upload_size { get; set; }
public BasicStats basic_stats { get; set; }
public string upload_host { get; set; }
public string geoip_info { get; set; }
}
public class Cmd
{
public string cmd { get; set; }
}
public class ApiResponse<T>
{
public bool ok { get; set; }
public object msg { get; set; }
public T data { get; set; }
public Cmd cmd { get; set; }
}
public class VoidApi
{
public static async Task<string> CallApiAsync(string cmd)
{
var req = (HttpWebRequest)WebRequest.Create($"https://{Program.BaseHostname}/api");
req.Method = "POST";
req.ContentType = "application/json";
req.UserAgent = Program.UserAgent;
var cmd_data = Encoding.UTF8.GetBytes(cmd);
await (await req.GetRequestStreamAsync()).WriteAsync(cmd_data, 0, cmd_data.Length);
var rsp = await req.GetResponseAsync();
using (var sr = new StreamReader(rsp.GetResponseStream()))
{
return await sr.ReadToEndAsync();
}
}
public static async Task<SiteInfo> GetUploadHostAsync()
{
return JsonConvert.DeserializeObject<ApiResponse<SiteInfo>>(await CallApiAsync(JsonConvert.SerializeObject(new Cmd()
{
cmd = "site_info"
}))).data;
}
}
}
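
A minimal usage sketch (hypothetical VoidApiSketch class; assumes the live https://v3.void.cat/api endpoint is reachable):

using System;
using System.Threading.Tasks;
using void_util;

static class VoidApiSketch
{
    public static async Task RunAsync()
    {
        // the "site_info" command returns the upload host plus some basic stats
        var site = await VoidApi.GetUploadHostAsync();
        Console.WriteLine($"upload host: {site.upload_host}, max upload size: {site.max_upload_size}");
    }
}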

View File

@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>netcoreapp2.1;net471</TargetFrameworks>
<LangVersion>latest</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="11.0.2" />
</ItemGroup>
</Project>