web -> desktop

Alexey Kasyanchuk 2018-01-31 19:02:28 +03:00
parent 0e4888ab76
commit d8afce8964
95 changed files with 10679 additions and 1893 deletions

18
.babelrc Normal file

@ -0,0 +1,18 @@
{
"presets": [
[
"@babel/env",
{
"targets": {
"browsers": "last 2 Chrome versions",
"node": "current"
}
}
],
"@babel/react",
"@babel/stage-0"
],
"plugins": [
["transform-object-rest-spread", { "useBuiltIns": true }]
]
}

13
.editorconfig Normal file

@ -0,0 +1,13 @@
# editorconfig.org
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false

15
.gitignore vendored

@ -1,3 +1,12 @@
node_modules/*
build/
config.json
node_modules
.DS_Store
Thumbs.db
*.log
/dist
/temp
# ignore everything in the 'app' folder that has been generated from the 'src' folder
/app/app.js
/app/background.js
/app/**/*.map

9
LICENSE Normal file

@ -0,0 +1,9 @@
The MIT License (MIT)
Copyright (c) 2015-2017 Jakub Szwacz
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

102
README.md Normal file

@ -0,0 +1,102 @@
# electron-boilerplate
A minimalistic boilerplate for [Electron runtime](http://electron.atom.io). Tested on Windows, macOS and Linux.
This project contains only the bare minimum of dependencies needed to provide a nice development environment. It doesn't impose any frontend technologies on you, so you can pick your favourite.
# Quick start
Make sure you have [Node.js](https://nodejs.org) installed, then type the following commands known to every Node developer...
```
git clone https://github.com/szwacz/electron-boilerplate.git
cd electron-boilerplate
npm install
npm start
```
...and you have a running desktop application on your screen.
# Structure of the project
The application consists of two main folders...
`src` - files within this folder get transpiled or compiled (because Electron can't use them directly).
`app` - contains all static assets which don't need any pre-processing. Put images, CSS, HTML files, etc. here.
The build process compiles the content of the `src` folder and puts it into the `app` folder, so after the build has finished, your `app` folder contains the full, runnable application.
Treat the `src` and `app` folders like two halves of one bigger thing.
The drawback of this design is that the `app` folder contains some files which should be git-ignored and some which shouldn't (see the `.gitignore` file). But this two-folder split makes development builds much, much faster.
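For orientation, the layout implied by the webpack config and `.gitignore` in this commit looks roughly like this (only the key files are shown; the role annotations are inferred from the configs):
```
src/
  app/index.js              # UI entry, compiled to app/app.js
  background/background.js  # main-process entry, compiled to app/background.js ("main" in package.json)
app/
  app.html                  # static page that loads the compiled app.js
  app.js, background.js     # build output, git-ignored
```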
# Development
## Starting the app
```
npm start
```
## The build pipeline
The build process uses [Webpack](https://webpack.js.org/). The entry points are `src/background/background.js` and `src/app/index.js` (see `build/webpack.app.config.js`). Webpack will follow all `import` statements starting from those files and compile the code of the whole dependency tree into one `.js` file per entry point.
[Babel](http://babeljs.io/) is also used, but mainly for its great error messages. Under the hood Electron runs the latest Chromium, so most new JavaScript features are already supported natively.
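Because the shared config in `build/webpack.base.config.js` wires up `babel-loader`, `css-loader`/`style-loader` and `url-loader`, you can import styles and images straight from JavaScript. A sketch (the file names are illustrative):
```js
// somewhere under src/ (illustrative imports enabled by the loaders)
import "./app.css";            // injected at runtime via style-loader + css-loader
import logo from "./logo.png"; // url-loader inlines the image as a data URL
```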
## Environments
Environment variables are handled in a slightly different way (not via `process.env`). Env files are plain JSON files in the `config` directory, and the build process dynamically links one of them as an `env` module. You can import it anywhere in your code where you need access to the environment.
```js
import env from "env";
console.log(env.name);
```
## Upgrading Electron version
To do so, edit `package.json`:
```json
"devDependencies": {
"electron": "1.7.9"
}
```
*Side note:* [Electron authors recommend](http://electron.atom.io/docs/tutorial/electron-versioning/) using a fixed version here.
## Adding npm modules to your app
Remember to respect the split between `dependencies` and `devDependencies` in the `package.json` file. Your distributable app will contain only the modules listed in `dependencies` after running the release script.
*Side note:* If the module you want to use in your app is a native one (not pure JavaScript but a compiled binary), you should first run `npm install name_of_npm_module` and then `npm run postinstall` to rebuild the module for Electron. You only need to do this once, the first time you install the module. Later on, the `postinstall` script will fire automatically with every `npm install`.
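For example, adding a hypothetical native module would look like this (the module name is only an illustration; `npm run postinstall` maps to `electron-builder install-app-deps`, which rebuilds native dependencies against Electron's Node version):
```
npm install some-native-module
npm run postinstall
```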
# Testing
Run all tests:
```
npm test
```
## Unit
```
npm run unit
```
Unit tests use the [electron-mocha](https://github.com/jprichardson/electron-mocha) test runner with the [Chai](http://chaijs.com/api/assert/) assertion library. You can put your spec files wherever you want within the `src` directory; just name them with the `.spec.js` extension.
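As an illustration, a minimal spec could look like the following (the file name and the assertion are made up; any `*.spec.js` file under `src` is picked up by the unit run):
```js
// src/example.spec.js (hypothetical example spec)
import { expect } from "chai";

describe("example", () => {
  it("adds two numbers", () => {
    expect(1 + 2).to.equal(3);
  });
});
```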
## End to end
```
npm run e2e
```
End-to-end tests use [Mocha](https://mochajs.org/) and [Spectron](http://electron.atom.io/spectron/). This task will run all files in the `e2e` directory with the `.e2e.js` extension.
# Making a release
To package your app into an installer, use the command:
```
npm run release
```
Once the packaging process has finished, the `dist` directory will contain your distributable file.
We use [electron-builder](https://github.com/electron-userland/electron-builder) to handle the packaging process. It has a lot of [customization options](https://www.electron.build/configuration/configuration), which you can declare under the `"build"` key in `package.json`.
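For instance, a minimal `"build"` section could look like this (the values are illustrative; see the `package.json` in this commit for the real configuration):
```json
"build": {
  "appId": "com.example.app",
  "files": ["app/**/*", "node_modules/**/*", "package.json"],
  "directories": { "buildResources": "resources" }
}
```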
You can package your app cross-platform from a single operating system, [electron-builder kind of supports this](https://www.electron.build/multi-platform-build), but there are limitations and asterisks. That's why this boilerplate doesn't do that by default.

14
app/app.html Normal file

@ -0,0 +1,14 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Torrent Search</title>
</head>
<body>
<div id="mount-point">
</div>
<script src="app.js"></script>
</body>
</html>


@ -1,118 +0,0 @@
-- MySQL dump 10.16 Distrib 10.1.20-MariaDB, for Linux (x86_64)
--
-- Host: localhost Database: localhost
-- ------------------------------------------------------
-- Server version 10.1.20-MariaDB
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `files`
--
DROP TABLE IF EXISTS `files`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `files` (
`hash` char(40) DEFAULT NULL,
`path` text,
`size` bigint(20) unsigned DEFAULT NULL,
`fileid` int(10) unsigned NOT NULL AUTO_INCREMENT,
PRIMARY KEY (`fileid`),
KEY `hash` (`hash`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `sphinx_counter`
--
DROP TABLE IF EXISTS `sphinx_counter`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `sphinx_counter` (
`counter_id` int(11) NOT NULL,
`max_doc_id` int(10) unsigned NOT NULL,
PRIMARY KEY (`counter_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `statistic`
--
DROP TABLE IF EXISTS `statistic`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `statistic` (
`size` bigint(20) unsigned DEFAULT NULL,
`files` bigint(20) unsigned DEFAULT NULL,
`torrents` int(10) unsigned DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `torrents`
--
DROP TABLE IF EXISTS `torrents`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `torrents` (
`hash` char(40) NOT NULL,
`name` text,
`size` bigint(20) unsigned DEFAULT NULL,
`files` int(10) unsigned DEFAULT NULL,
`piecelength` int(10) unsigned DEFAULT NULL,
`added` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`ipv4` char(15) DEFAULT NULL,
`port` smallint(5) unsigned DEFAULT NULL,
`contentType` enum('video','audio','pictures','books','application','archive','disc') DEFAULT NULL,
`contentCategory` varchar(32) DEFAULT NULL,
`seeders` int(10) unsigned DEFAULT NULL,
`leechers` int(10) unsigned DEFAULT NULL,
`completed` int(10) unsigned DEFAULT NULL,
`torrentid` int(10) unsigned NOT NULL AUTO_INCREMENT,
`trackersChecked` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`good` int(10) unsigned DEFAULT '0',
`bad` int(10) unsigned DEFAULT '0',
PRIMARY KEY (`hash`),
UNIQUE KEY `torrentid` (`torrentid`),
KEY `added` (`added`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `torrents_actions`
--
DROP TABLE IF EXISTS `torrents_actions`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `torrents_actions` (
`hash` char(40) NOT NULL,
`action` varchar(32) DEFAULT NULL,
`ipv4` char(15) DEFAULT NULL,
KEY `hash` (`hash`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2017-01-30 12:10:40

20
build/start.js Normal file

@ -0,0 +1,20 @@
const childProcess = require("child_process");
const electron = require("electron");
const webpack = require("webpack");
const config = require("./webpack.app.config");
const env = "development";
const compiler = webpack(config(env));
let electronStarted = false;
const watching = compiler.watch({}, (err, stats) => {
if (!err && !stats.hasErrors() && !electronStarted) {
electronStarted = true;
childProcess
.spawn(electron, ["."], { stdio: "inherit" })
.on("close", () => {
watching.close();
});
}
});


@ -0,0 +1,16 @@
const path = require("path");
const merge = require("webpack-merge");
const base = require("./webpack.base.config");
module.exports = env => {
return merge(base(env), {
entry: {
background: "./src/background/background.js",
app: "./src/app/index.js"
},
output: {
filename: "[name].js",
path: path.resolve(__dirname, "../app")
}
});
};


@ -0,0 +1,40 @@
const path = require("path");
const nodeExternals = require("webpack-node-externals");
const FriendlyErrorsWebpackPlugin = require("friendly-errors-webpack-plugin");
module.exports = env => {
return {
target: "node",
node: {
__dirname: false,
__filename: false
},
externals: [nodeExternals()],
resolve: {
alias: {
env: path.resolve(__dirname, `../config/env_${env}.json`)
}
},
devtool: "source-map",
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
use: ["babel-loader"]
},
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
},
{
test: /\.(?:ico|gif|png|jpg|jpeg|webp)$/,
use: ['url-loader']
}
]
},
plugins: [
new FriendlyErrorsWebpackPlugin({ clearConsole: env === "development" })
]
};
};


@ -0,0 +1,29 @@
const merge = require("webpack-merge");
const jetpack = require("fs-jetpack");
const base = require("./webpack.base.config");
// Test files are scattered through the whole project. Here we're searching
// for them and generating an entry file for webpack.
const e2eDir = jetpack.cwd("e2e");
const tempDir = jetpack.cwd("temp");
const entryFilePath = tempDir.path("e2e_entry.js");
const entryFileContent = e2eDir
.find({ matching: "*.e2e.js" })
.reduce((fileContent, path) => {
const normalizedPath = path.replace(/\\/g, "/");
return `${fileContent}import "../e2e/${normalizedPath}";\n`;
}, "");
jetpack.write(entryFilePath, entryFileContent);
module.exports = env => {
return merge(base(env), {
entry: entryFilePath,
output: {
filename: "e2e.js",
path: tempDir.path()
}
});
};


@ -0,0 +1,29 @@
const merge = require("webpack-merge");
const jetpack = require("fs-jetpack");
const base = require("./webpack.base.config");
// Test files are scattered through the whole project. Here we're searching
// for them and generating an entry file for webpack.
const srcDir = jetpack.cwd("src");
const tempDir = jetpack.cwd("temp");
const entryFilePath = tempDir.path("specs_entry.js");
const entryFileContent = srcDir
.find({ matching: "*.spec.js" })
.reduce((fileContent, path) => {
const normalizedPath = path.replace(/\\/g, "/");
return `${fileContent}import "../src/${normalizedPath}";\n`;
}, "");
jetpack.write(entryFilePath, entryFileContent);
module.exports = env => {
return merge(base(env), {
entry: entryFilePath,
output: {
filename: "specs.js",
path: tempDir.path()
}
});
};


@ -1,28 +0,0 @@
// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be
// injected into the application via DefinePlugin in Webpack configuration.
var REACT_APP = /^REACT_APP_/i;
function getClientEnvironment(publicUrl) {
var processEnv = Object
.keys(process.env)
.filter(key => REACT_APP.test(key))
.reduce((env, key) => {
env[key] = JSON.stringify(process.env[key]);
return env;
}, {
// Useful for determining whether we're running in production mode.
// Most importantly, it switches React into the correct mode.
'NODE_ENV': JSON.stringify(
process.env.NODE_ENV || 'development'
),
// Useful for resolving the correct path to static assets in `public`.
// For example, <img src={process.env.PUBLIC_URL + '/img/logo.png'} />.
// This should only be used as an escape hatch. Normally you would put
// images into the `src` and `import` them in code to get their paths.
'PUBLIC_URL': JSON.stringify(publicUrl)
});
return {'process.env': processEnv};
}
module.exports = getClientEnvironment;


@ -0,0 +1,4 @@
{
"name": "development",
"description": "Add here any environment specific stuff you like."
}


@ -0,0 +1,4 @@
{
"name": "production",
"description": "Add here any environment specific stuff you like."
}

4
config/env_test.json Normal file

@ -0,0 +1,4 @@
{
"name": "test",
"description": "Add here any environment specific stuff you like."
}


@ -1,12 +0,0 @@
// This is a custom Jest transformer turning style imports into empty objects.
// http://facebook.github.io/jest/docs/tutorial-webpack.html
module.exports = {
process() {
return 'module.exports = {};';
},
getCacheKey(fileData, filename) {
// The output is always the same.
return 'cssTransform';
},
};


@ -1,10 +0,0 @@
const path = require('path');
// This is a custom Jest transformer turning file imports into filenames.
// http://facebook.github.io/jest/docs/tutorial-webpack.html
module.exports = {
process(src, filename) {
return 'module.exports = ' + JSON.stringify(path.basename(filename)) + ';';
},
};


@ -1,45 +0,0 @@
var path = require('path');
var fs = require('fs');
// Make sure any symlinks in the project folder are resolved:
// https://github.com/facebookincubator/create-react-app/issues/637
var appDirectory = fs.realpathSync(process.cwd());
function resolveApp(relativePath) {
return path.resolve(appDirectory, relativePath);
}
// We support resolving modules according to `NODE_PATH`.
// This lets you use absolute paths in imports inside large monorepos:
// https://github.com/facebookincubator/create-react-app/issues/253.
// It works similar to `NODE_PATH` in Node itself:
// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders
// We will export `nodePaths` as an array of absolute paths.
// It will then be used by Webpack configs.
// Jest doesn't need this because it already handles `NODE_PATH` out of the box.
// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored.
// Otherwise, we risk importing Node.js core modules into an app instead of Webpack shims.
// https://github.com/facebookincubator/create-react-app/issues/1023#issuecomment-265344421
var nodePaths = (process.env.NODE_PATH || '')
.split(process.platform === 'win32' ? ';' : ':')
.filter(Boolean)
.filter(folder => !path.isAbsolute(folder))
.map(resolveApp);
// config after eject: we're in ./config/
module.exports = {
appBuild: resolveApp('build'),
appPublic: resolveApp('public'),
appHtml: resolveApp('public/index.html'),
appIndexJs: resolveApp('src/index.js'),
appPackageJson: resolveApp('package.json'),
appSrc: resolveApp('src'),
yarnLockFile: resolveApp('yarn.lock'),
testsSetup: resolveApp('src/setupTests.js'),
appNodeModules: resolveApp('node_modules'),
ownNodeModules: resolveApp('node_modules'),
nodePaths: nodePaths
};


@ -1,14 +0,0 @@
if (typeof Promise === 'undefined') {
// Rejection tracking prevents a common issue where React gets into an
// inconsistent state due to an error, but it gets swallowed by a Promise,
// and the user has no idea what causes React's erratic future behavior.
require('promise/lib/rejection-tracking').enable();
window.Promise = require('promise/lib/es6-extensions.js');
}
// fetch() polyfill for making API calls.
require('whatwg-fetch');
// Object.assign() is commonly used with React.
// It will use the native implementation if it's present and isn't buggy.
Object.assign = require('object-assign');


@ -1,208 +0,0 @@
var autoprefixer = require('autoprefixer');
var webpack = require('webpack');
var HtmlWebpackPlugin = require('html-webpack-plugin');
var CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
var InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
var WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin');
var getClientEnvironment = require('./env');
var paths = require('./paths');
// Webpack uses `publicPath` to determine where the app is being served from.
// In development, we always serve from the root. This makes config easier.
var publicPath = '/';
// `publicUrl` is just like `publicPath`, but we will provide it to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_PATH%/xyz looks better than %PUBLIC_PATH%xyz.
var publicUrl = '';
// Get environment variables to inject into our app.
var env = getClientEnvironment(publicUrl);
// This is the development configuration.
// It is focused on developer experience and fast rebuilds.
// The production configuration is different and lives in a separate file.
module.exports = {
// You may want 'eval' instead if you prefer to see the compiled output in DevTools.
// See the discussion in https://github.com/facebookincubator/create-react-app/issues/343.
devtool: 'cheap-module-source-map',
// These are the "entry points" to our application.
// This means they will be the "root" imports that are included in JS bundle.
// The first two entry points enable "hot" CSS and auto-refreshes for JS.
entry: [
// Include an alternative client for WebpackDevServer. A client's job is to
// connect to WebpackDevServer by a socket and get notified about changes.
// When you save a file, the client will either apply hot updates (in case
// of CSS changes), or refresh the page (in case of JS changes). When you
// make a syntax error, this client will display a syntax error overlay.
// Note: instead of the default WebpackDevServer client, we use a custom one
// to bring better experience for Create React App users. You can replace
// the line below with these two lines if you prefer the stock client:
// require.resolve('webpack-dev-server/client') + '?/',
// require.resolve('webpack/hot/dev-server'),
require.resolve('react-dev-utils/webpackHotDevClient'),
// We ship a few polyfills by default:
require.resolve('./polyfills'),
// Finally, this is your app's code:
paths.appIndexJs
// We include the app code last so that if there is a runtime error during
// initialization, it doesn't blow up the WebpackDevServer client, and
// changing JS code would still trigger a refresh.
],
output: {
// Next line is not used in dev but WebpackDevServer crashes without it:
path: paths.appBuild,
// Add /* filename */ comments to generated require()s in the output.
pathinfo: true,
// This does not produce a real file. It's just the virtual path that is
// served by WebpackDevServer in development. This is the JS bundle
// containing code from all our entry points, and the Webpack runtime.
filename: 'static/js/bundle.js',
// This is the URL that app is served from. We use "/" in development.
publicPath: publicPath
},
resolve: {
// This allows you to set a fallback for where Webpack should look for modules.
// We read `NODE_PATH` environment variable in `paths.js` and pass paths here.
// We use `fallback` instead of `root` because we want `node_modules` to "win"
// if there any conflicts. This matches Node resolution mechanism.
// https://github.com/facebookincubator/create-react-app/issues/253
fallback: paths.nodePaths,
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebookincubator/create-react-app/issues/290
extensions: ['.js', '.json', '.jsx', ''],
alias: {
// Support React Native Web
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
'react-native': 'react-native-web'
}
},
module: {
// First, run the linter.
// It's important to do this before Babel processes the JS.
preLoaders: [
{
test: /\.(js|jsx)$/,
loader: 'eslint',
include: paths.appSrc,
}
],
loaders: [
// Default loader: load all assets that are not handled
// by other loaders with the url loader.
// Note: This list needs to be updated with every change of extensions
// the other loaders match.
// E.g., when adding a loader for a new supported file extension,
// we need to add the supported extension to this loader too.
// Add one new line in `exclude` for each loader.
//
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `build` folder.
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
exclude: [
/\.html$/,
/\.(js|jsx)$/,
/\.css$/,
/\.json$/,
/\.svg$/
],
loader: 'url',
query: {
limit: 10000,
name: 'static/media/[name].[hash:8].[ext]'
}
},
// Process JS with Babel.
{
test: /\.(js|jsx)$/,
include: paths.appSrc,
loader: 'babel',
query: {
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true
}
},
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader turns CSS into JS modules that inject <style> tags.
// In production, we use a plugin to extract that CSS to a file, but
// in development "style" loader enables hot editing of CSS.
{
test: /\.css$/,
loader: 'style!css?importLoaders=1!postcss'
},
// JSON is not enabled by default in Webpack but both Node and Browserify
// allow it implicitly so we also enable it.
{
test: /\.json$/,
loader: 'json'
},
// "file" loader for svg
{
test: /\.svg$/,
loader: 'file',
query: {
name: 'static/media/[name].[hash:8].[ext]'
}
}
]
},
// We use PostCSS for autoprefixing only.
postcss: function() {
return [
autoprefixer({
browsers: [
'>1%',
'last 4 versions',
'Firefox ESR',
'not ie < 9', // React doesn't support IE8 anyway
]
}),
];
},
plugins: [
// Makes the public URL available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
// In development, this will be an empty string.
new InterpolateHtmlPlugin({
PUBLIC_URL: publicUrl
}),
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin({
inject: true,
template: paths.appHtml,
}),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'development') { ... }. See `./env.js`.
new webpack.DefinePlugin(env),
// This is necessary to emit hot updates (currently CSS only):
new webpack.HotModuleReplacementPlugin(),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebookincubator/create-react-app/issues/240
new CaseSensitivePathsPlugin(),
// If you require a missing module and then `npm install` it, you still have
// to restart the development server for Webpack to discover it. This plugin
// makes the discovery automatic so you don't have to restart.
// See https://github.com/facebookincubator/create-react-app/issues/186
new WatchMissingNodeModulesPlugin(paths.appNodeModules)
],
// Some libraries import Node modules but don't use them in the browser.
// Tell Webpack to provide empty mocks for them so importing them works.
node: {
fs: 'empty',
net: 'empty',
tls: 'empty'
}
};


@ -1,277 +0,0 @@
var autoprefixer = require('autoprefixer');
var webpack = require('webpack');
var HtmlWebpackPlugin = require('html-webpack-plugin');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var ManifestPlugin = require('webpack-manifest-plugin');
var InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
var SWPrecacheWebpackPlugin = require('sw-precache-webpack-plugin');
var url = require('url');
var paths = require('./paths');
var getClientEnvironment = require('./env');
const RobotsPlugin = require('@tanepiper/robots-webpack-plugin');
function ensureSlash(path, needsSlash) {
var hasSlash = path.endsWith('/');
if (hasSlash && !needsSlash) {
return path.substr(path, path.length - 1);
} else if (!hasSlash && needsSlash) {
return path + '/';
} else {
return path;
}
}
// We use "homepage" field to infer "public path" at which the app is served.
// Webpack needs to know it to put the right <script> hrefs into HTML even in
// single-page apps that may serve index.html for nested URLs like /todos/42.
// We can't use a relative path in HTML because we don't want to load something
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
var homepagePath = require(paths.appPackageJson).homepage;
var homepagePathname = homepagePath ? url.parse(homepagePath).pathname : '/';
// Webpack uses `publicPath` to determine where the app is being served from.
// It requires a trailing slash, or the file assets will get an incorrect path.
var publicPath = ensureSlash(homepagePathname, true);
// `publicUrl` is just like `publicPath`, but we will provide it to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_PATH%/xyz looks better than %PUBLIC_PATH%xyz.
var publicUrl = ensureSlash(homepagePathname, false);
// Get environment variables to inject into our app.
var env = getClientEnvironment(publicUrl);
// Assert this just to be safe.
// Development builds of React are slow and not intended for production.
if (env['process.env'].NODE_ENV !== '"production"') {
throw new Error('Production builds must have NODE_ENV=production.');
}
// This is the production configuration.
// It compiles slowly and is focused on producing a fast and minimal bundle.
// The development configuration is different and lives in a separate file.
module.exports = {
// Don't attempt to continue if there are any errors.
bail: true,
// We generate sourcemaps in production. This is slow but gives good results.
// You can exclude the *.map files from the build during deployment.
devtool: 'source-map',
// In production, we only want to load the polyfills and the app code.
entry: [
require.resolve('./polyfills'),
paths.appIndexJs
],
output: {
// The build folder.
path: paths.appBuild,
// Generated JS file names (with nested folders).
// There will be one main bundle, and one file per asynchronous chunk.
// We don't currently advertise code splitting but Webpack supports it.
filename: 'static/js/[name].[chunkhash:8].js',
chunkFilename: 'static/js/[name].[chunkhash:8].chunk.js',
// We inferred the "public path" (such as / or /my-project) from homepage.
publicPath: publicPath
},
resolve: {
// This allows you to set a fallback for where Webpack should look for modules.
// We read `NODE_PATH` environment variable in `paths.js` and pass paths here.
// We use `fallback` instead of `root` because we want `node_modules` to "win"
// if there any conflicts. This matches Node resolution mechanism.
// https://github.com/facebookincubator/create-react-app/issues/253
fallback: paths.nodePaths,
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebookincubator/create-react-app/issues/290
extensions: ['.js', '.json', '.jsx', ''],
alias: {
// Support React Native Web
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
'react-native': 'react-native-web'
}
},
module: {
// First, run the linter.
// It's important to do this before Babel processes the JS.
preLoaders: [
{
test: /\.(js|jsx)$/,
loader: 'eslint',
include: paths.appSrc
}
],
loaders: [
// Default loader: load all assets that are not handled
// by other loaders with the url loader.
// Note: This list needs to be updated with every change of extensions
// the other loaders match.
// E.g., when adding a loader for a new supported file extension,
// we need to add the supported extension to this loader too.
// Add one new line in `exclude` for each loader.
//
// "file" loader makes sure those assets end up in the `build` folder.
// When you `import` an asset, you get its filename.
// "url" loader works just like "file" loader but it also embeds
// assets smaller than specified size as data URLs to avoid requests.
{
exclude: [
/\.html$/,
/\.(js|jsx)$/,
/\.css$/,
/\.json$/,
/\.svg$/
],
loader: 'url',
query: {
limit: 10000,
name: 'static/media/[name].[hash:8].[ext]'
}
},
// Process JS with Babel.
{
test: /\.(js|jsx)$/,
include: paths.appSrc,
loader: 'babel',
},
// The notation here is somewhat confusing.
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader normally turns CSS into JS modules injecting <style>,
// but unlike in development configuration, we do something different.
// `ExtractTextPlugin` first applies the "postcss" and "css" loaders
// (second argument), then grabs the result CSS and puts it into a
// separate file in our build process. This way we actually ship
// a single CSS file in production instead of JS code injecting <style>
// tags. If you use code splitting, however, any async bundles will still
// use the "style" loader inside the async code so CSS from them won't be
// in the main CSS file.
{
test: /\.css$/,
loader: ExtractTextPlugin.extract('style', 'css?importLoaders=1!postcss')
// Note: this won't work without `new ExtractTextPlugin()` in `plugins`.
},
// JSON is not enabled by default in Webpack but both Node and Browserify
// allow it implicitly so we also enable it.
{
test: /\.json$/,
loader: 'json'
},
// "file" loader for svg
{
test: /\.svg$/,
loader: 'file',
query: {
name: 'static/media/[name].[hash:8].[ext]'
}
}
]
},
// We use PostCSS for autoprefixing only.
postcss: function() {
return [
autoprefixer({
browsers: [
'>1%',
'last 4 versions',
'Firefox ESR',
'not ie < 9', // React doesn't support IE8 anyway
]
}),
];
},
plugins: [
// Makes the public URL available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
// In production, it will be an empty string unless you specify "homepage"
// in `package.json`, in which case it will be the pathname of that URL.
new InterpolateHtmlPlugin({
PUBLIC_URL: publicUrl
}),
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin({
inject: true,
template: paths.appHtml,
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true
}
}),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`.
// It is absolutely essential that NODE_ENV was set to production here.
// Otherwise React will be compiled in the very slow development mode.
new webpack.DefinePlugin(env),
// This helps ensure the builds are consistent if source hasn't changed:
new webpack.optimize.OccurrenceOrderPlugin(),
// Try to dedupe duplicated modules, if any:
new webpack.optimize.DedupePlugin(),
// Minify the code.
new webpack.optimize.UglifyJsPlugin({
compress: {
screw_ie8: true, // React doesn't support IE8
warnings: false
},
mangle: {
screw_ie8: true
},
output: {
comments: false,
screw_ie8: true
}
}),
// Note: this won't work without ExtractTextPlugin.extract(..) in `loaders`.
new ExtractTextPlugin('static/css/[name].[contenthash:8].css'),
// Generate a manifest file which contains a mapping of all asset filenames
// to their corresponding output file so that tools can pick it up without
// having to parse `index.html`.
new ManifestPlugin({
fileName: 'asset-manifest.json'
}),
new SWPrecacheWebpackPlugin({
// By default, a cache-busting query parameter is appended to requests
// used to populate the caches, to ensure the responses are fresh.
// If a URL is already hashed by Webpack, then there is no concern
// about it being stale, and the cache-busting can be skipped.
dontCacheBustUrlsMatching: /\.\w{8}\./,
filename: 'service-worker.js',
logger(message) {
if (message.indexOf('Total precache size is') === 0) {
// This message occurs for every build and is a bit too noisy.
return;
}
if (message.indexOf('Skipping static resource') === 0) {
// This message obscures real errors so we ignore it.
// https://github.com/facebookincubator/create-react-app/issues/2612
return;
}
console.log(message);
},
minify: true,
// For unknown URLs, fallback to the index page
navigateFallback: publicUrl + '/index.html',
// Ignores URLs starting from /__ (useful for Firebase):
// https://github.com/facebookincubator/create-react-app/issues/2237#issuecomment-302693219
navigateFallbackWhitelist: [/^(?!\/__).*/],
// Don't precache sourcemaps (they're large) and build asset manifest:
staticFileGlobsIgnorePatterns: [/\.map$/, /asset-manifest\.json$/],
}),
new RobotsPlugin({sitemap: 'http://ratsontheboat.org/sitemap.xml'})
],
// Some libraries import Node modules but don't use them in the browser.
// Tell Webpack to provide empty mocks for them so importing them works.
node: {
fs: 'empty',
net: 'empty',
tls: 'empty'
}
};

17
crontab

@ -1,17 +0,0 @@
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
MAILTO=root
# For details see man 4 crontabs
# Example of job definition:
# .---------------- minute (0 - 59)
# | .------------- hour (0 - 23)
# | | .---------- day of month (1 - 31)
# | | | .------- month (1 - 12) OR jan,feb,mar,apr ...
# | | | | .---- day of week (0 - 6) (Sunday=0 or 7) OR sun,mon,tue,wed,thu,fri,sat
# | | | | |
# * * * * * user-name command to be executed
*/10 * * * * root indexer --rotate files_index_delta torrents_index_delta
0 3 * * * root indexer --rotate --all

8
deploy

@ -1,8 +0,0 @@
#!/bin/bash
SOURCE_PATH="`dirname \"$0\"`"
cd "$SOURCE_PATH"
npm install
pm2 stop index.js
npm run build
pm2 start index.js

12
e2e/hello_world.e2e.js Normal file

@ -0,0 +1,12 @@
import { expect } from "chai";
import testUtils from "./utils";
describe("application launch", () => {
beforeEach(testUtils.beforeEach);
afterEach(testUtils.afterEach);
it("index page loaded", async function() {
const { app } = this
await app.client.waitForExist('#index-window')
});
});

26
e2e/utils.js Normal file

@ -0,0 +1,26 @@
import electron from "electron";
import { Application } from "spectron";
const beforeEach = function() {
this.timeout(10000);
this.app = new Application({
path: electron,
args: ["."],
startTimeout: 10000,
waitTimeout: 10000
});
return this.app.start();
};
const afterEach = function() {
this.timeout(10000);
if (this.app && this.app.isRunning()) {
return this.app.stop();
}
return undefined;
};
export default {
beforeEach,
afterEach
};


@ -1,10 +0,0 @@
<!doctype html>
<html lang="ru">
<head>
<title>What isn't here yet, but probably will be somewhere soon</title>
</head>
<body>
<div id="root">Всем привет, благодарю всех кто поучаствовал в альфа-тесте сами знаете чего. Следите за обновлениями и ждите новых новостей</div>
<img src='http://s9.pikabu.ru/images/big_size_comm/2017-01_1/1483546623127164693.jpg' />
</body>
</html>


@ -1,17 +0,0 @@
var express = require('express');
var app = express();
var server = require('http').Server(app);
app.get('/', function(req, res)
{
res.sendfile(__dirname + '/build/index.html');
});
app.use(express.static('build'));
app.get('*', function(req, res)
{
res.sendfile(__dirname + '/build/index.html');
});
server.listen(8095);

9322
package-lock.json generated Normal file

File diff suppressed because it is too large.


@ -1,107 +1,107 @@
{
"name": "btsearch",
"name": "rats-search",
"productName": "Rats on The Boat",
"description": "P2P BitTorrent search engine",
"version": "0.1.0",
"private": true,
"devDependencies": {
"@tanepiper/robots-webpack-plugin": "^0.4.0",
"autoprefixer": "6.5.1",
"babel-core": "6.17.0",
"babel-eslint": "7.1.1",
"babel-jest": "17.0.2",
"babel-loader": "6.2.7",
"babel-preset-react-app": "^2.0.1",
"case-sensitive-paths-webpack-plugin": "1.1.4",
"chalk": "1.1.3",
"connect-history-api-fallback": "1.3.0",
"cross-spawn": "4.0.2",
"css-loader": "0.26.0",
"debug": "^2.6.9",
"detect-port": "1.0.1",
"dotenv": "2.0.0",
"eslint": "3.8.1",
"eslint-config-react-app": "^0.5.0",
"eslint-loader": "1.6.0",
"eslint-plugin-flowtype": "2.21.0",
"eslint-plugin-import": "2.0.1",
"eslint-plugin-jsx-a11y": "2.2.3",
"eslint-plugin-react": "6.4.1",
"extract-text-webpack-plugin": "1.0.1",
"file-loader": "0.9.0",
"filesize": "3.3.0",
"fs-extra": "0.30.0",
"gzip-size": "3.0.0",
"html-webpack-plugin": "2.24.0",
"http-proxy-middleware": "0.17.2",
"jest": "17.0.2",
"json-loader": "0.5.4",
"object-assign": "4.1.0",
"path-exists": "2.1.0",
"postcss-loader": "1.0.0",
"promise": "7.1.1",
"react-dev-utils": "^0.4.2",
"recursive-readdir": "2.1.0",
"strip-ansi": "3.0.1",
"style-loader": "0.13.1",
"sw-precache-webpack-plugin": "0.11.4",
"url-loader": "0.5.7",
"webpack": "1.14.0",
"webpack-dev-server": "1.16.2",
"webpack-manifest-plugin": "1.1.0",
"whatwg-fetch": "1.0.0"
},
"dependencies": {
"bencode": "^0.11.0",
"bitfield": "^1.1.2",
"diskusage": "^0.2.4",
"express": "^4.16.2",
"ipaddr.js": "^1.5.4",
"material-ui": "^0.16.6",
"moment": "^2.19.1",
"mysql": "^2.15.0",
"page": "^1.7.1",
"phantomjs-prebuilt": "^2.1.15",
"react": "^15.6.2",
"react-dom": "^15.6.2",
"react-input-range": "^1.2.1",
"react-tap-event-plugin": "^2.0.1",
"sitemap": "^1.13.0",
"socket.io": "^2.0.4"
},
"scripts": {
"start": "node scripts/start.js",
"build": "node scripts/build.js",
"test": "node scripts/test.js --env=jsdom"
},
"jest": {
"collectCoverageFrom": [
"src/**/*.{js,jsx}"
"author": "Alexey Kasyanchuk <degitx@gmail.com>",
"copyright": "© 2018 Draftup Software",
"homepage": "https://github.com/DEgITx/rats-search",
"main": "app/background.js",
"build": {
"appId": "com.example.electron-boilerplate",
"files": [
"app/**/*",
"node_modules/**/*",
"package.json"
],
"setupFiles": [
"<rootDir>\\config\\polyfills.js"
"extraResources": [
{
"from": "resources/icons/512x512.png",
"to": "icons/512x512.png"
}
],
"testPathIgnorePatterns": [
"<rootDir>[/\\\\](build|docs|node_modules)[/\\\\]"
],
"testEnvironment": "node",
"testURL": "http://localhost",
"transform": {
"^.+\\.(js|jsx)$": "<rootDir>/node_modules/babel-jest",
"^.+\\.css$": "<rootDir>\\config\\jest\\cssTransform.js",
"^(?!.*\\.(js|jsx|css|json)$)": "<rootDir>\\config\\jest\\fileTransform.js"
"directories": {
"buildResources": "resources"
},
"transformIgnorePatterns": [
"[/\\\\]node_modules[/\\\\].+\\.(js|jsx)$"
],
"moduleNameMapper": {
"^react-native$": "react-native-web"
"publish": null,
"win": {
"extraFiles": [
{
"from": "./imports/win",
"to": "."
}
]
},
"linux": {
"category": "Files",
"target": [
"AppImage",
"rpm",
"deb"
],
"extraFiles": [
{
"from": "./imports/linux",
"to": "."
}
]
},
"mac": {
"extraFiles": [
{
"from": "./imports/darwin",
"to": "./MacOS"
}
]
}
},
"babel": {
"presets": [
"react-app"
]
"scripts": {
"postinstall": "electron-builder install-app-deps",
"preunit": "webpack --config=build/webpack.unit.config.js --env=test --display=none",
"unit": "electron-mocha temp/specs.js --renderer --require source-map-support/register",
"pree2e": "webpack --config=build/webpack.app.config.js --env=test --display=none && webpack --config=build/webpack.e2e.config.js --env=test --display=none",
"e2e": "mocha temp/e2e.js --require source-map-support/register",
"test": "npm run unit && npm run e2e",
"start": "node build/start.js",
"prebuild": "webpack --config=build/webpack.app.config.js --env=production",
"build": "electron-builder"
},
"eslintConfig": {
"extends": "react-app"
"dependencies": {
"bencode": "^1.0.0",
"bitfield": "^1.1.2",
"debug": "^3.1.0",
"diskusage": "^0.2.4",
"fs-jetpack": "^1.2.0",
"ipaddr.js": "^1.5.4",
"material-ui": "^0.20.0",
"moment": "^2.20.1",
"mysql": "^2.15.0",
"react": "^16.2.0",
"react-dom": "^16.2.0",
"react-input-range": "^1.3.0",
"react-tap-event-plugin": "^3.0.2"
},
"devDependencies": {
"@babel/core": "^7.0.0-beta.38",
"@babel/preset-env": "^7.0.0-beta.38",
"@babel/preset-react": "^7.0.0-beta.38",
"@babel/preset-stage-0": "^7.0.0-beta.38",
"babel-loader": "^8.0.0-beta.0",
"babel-plugin-transform-object-rest-spread": "^7.0.0-beta.3",
"chai": "^4.1.0",
"css-loader": "^0.28.7",
"electron": "1.7.11",
"electron-builder": "^19.43.3",
"electron-mocha": "^5.0.0",
"friendly-errors-webpack-plugin": "^1.6.1",
"mocha": "^4.0.1",
"source-map-support": "^0.5.0",
"spectron": "^3.7.2",
"style-loader": "^0.19.0",
"url-loader": "^0.6.2",
"webpack": "^3.8.1",
"webpack-merge": "^4.1.0",
"webpack-node-externals": "^1.6.0"
}
}


@ -1,46 +0,0 @@
const mysql = require('mysql');
const {torrentTypeDetect} = require('../lib/content');
const mysqlSettings = {
host : 'localhost',
user : 'btsearch',
password : 'pirateal100x',
database : 'btsearch'
};
socketMysql = mysql.createConnection(mysqlSettings);
socketMysql.connect(function(mysqlError) {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
return;
}
let current = 0;
function func(index) {
socketMysql.query("SELECT * FROM `torrents` WHERE (`contentType` = 'video' or contentType = 'pictures' or contentType = 'archive') and contentCategory IS NULL LIMIT ?, 30000", [index], function (error, torrents, fields) {
let records = torrents.length;
let next = index + records;
if(records == 0)
return;
torrents.forEach((torrent) => {
socketMysql.query('SELECT * FROM `files` WHERE hash = ?', torrent.hash, function (error, files, fields) {
torrentTypeDetect(torrent, files);
if(torrent.contentType && torrent.contentCategory == 'xxx') {
socketMysql.query('UPDATE `torrents` SET `contentType` = ?, contentCategory = ? WHERE `hash` = ?', [torrent.contentType, torrent.contentCategory, torrent.hash], function (error, files, fields) {
console.log('xxx ' + torrent.name + ': ' + (++current) + '/' + torrents.length);
if(--records == 0)
func(next)
});
} else {
console.log((++current) + '/' + torrents.length);
if(--records == 0)
func(next)
}
});
});
});
}
func(0);
});


@ -1,38 +0,0 @@
const mysql = require('mysql');
const {torrentTypeDetect} = require('../lib/content');
const mysqlSettings = {
host : 'localhost',
user : 'btsearch',
password : 'pirateal100x',
database : 'btsearch'
};
socketMysql = mysql.createConnection(mysqlSettings);
socketMysql.connect(function(mysqlError) {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
return;
}
let inc = 0;
socketMysql.query('SELECT * FROM `torrents` WHERE `seeders` IS NULL AND files > 1000', function (error, torrents, fields) {
torrents.forEach(({hash, name}) => {
console.log(name + 'deleted');
socketMysql.query('DELETE FROM `files` WHERE hash = ?', hash, function (error, files, fields) {
if(!files)
console.log(error);
console.log(name + ' files deleted')
});
socketMysql.query('DELETE FROM `torrents` WHERE hash = ?', hash, function (error, files, fields) {
if(!files)
console.log(error);
console.log(name + ' torrent deleted')
});
});
console.log('affected torrents: ' + torrents.length);
});
});


@ -1,34 +0,0 @@
const mysql = require('mysql');
const {torrentTypeDetect} = require('../lib/content');
const mysqlSettings = {
host : 'localhost',
user : 'btsearch',
password : 'pirateal100x',
database : 'btsearch'
};
socketMysql = mysql.createConnection(mysqlSettings);
socketMysql.connect(function(mysqlError) {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
return;
}
let inc = 0;
socketMysql.query('SELECT * FROM `torrents` WHERE `contentType` IS NULL', function (error, torrents, fields) {
torrents.forEach((torrent) => {
socketMysql.query('SELECT * FROM `files` WHERE hash = ?', torrent.hash, function (error, files, fields) {
torrentTypeDetect(torrent, files);
if(torrent.contentType) {
socketMysql.query('UPDATE `torrents` SET `contentType` = ? WHERE `hash` = ?', [torrent.contentType, torrent.hash], function (error, files, fields) {
console.log((++inc) + '/' + torrents.length);
});
} else {
console.log((++inc) + '/' + torrents.length);
}
});
});
});
});


@ -1,27 +0,0 @@
var system = require('system');
var url = system.args[1];
var page = require('webpage').create();
page.open(url, function(status) {
if (status == 'fail')
phantom.exit();
var intervalTime = 1;
setInterval(function() {
var ready = page.evaluate(function () {
if(typeof window.isReady !== 'undefined')
{
return window.isReady();
}
return false;
});
if (ready) {
var out = page.content;
out = out.replace(/<script[^>]+>(.|\n|\r)*?<\/script\s*>/ig, '');
out = out.replace('<meta name="fragment" content="!" />', '');
out = out.replace('<meta name="fragment" content="!">', '');
console.log(out)
phantom.exit();
}
}, intervalTime++);
});

Binary file not shown.



@ -1,13 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="fragment" content="!" />
<link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
<title>BT Search</title>
</head>
<body>
<div id="root"></div>
</body>
</html>

BIN
resources/icon.icns Normal file

Binary file not shown.

BIN
resources/icon.ico Normal file

Binary file not shown.


BIN
resources/icons/512x512.png Normal file

Binary file not shown.



@ -1,224 +0,0 @@
// Do this as the first thing so that any code reading it knows the right env.
process.env.NODE_ENV = 'production';
// Load environment variables from .env file. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set.
// https://github.com/motdotla/dotenv
require('dotenv').config({silent: true});
var chalk = require('chalk');
var fs = require('fs-extra');
var path = require('path');
var pathExists = require('path-exists');
var filesize = require('filesize');
var gzipSize = require('gzip-size').sync;
var webpack = require('webpack');
var config = require('../config/webpack.config.prod');
var paths = require('../config/paths');
var checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
var recursive = require('recursive-readdir');
var stripAnsi = require('strip-ansi');
var useYarn = pathExists.sync(paths.yarnLockFile);
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
// Input: /User/dan/app/build/static/js/main.82be8.js
// Output: /static/js/main.js
function removeFileNameHash(fileName) {
return fileName
.replace(paths.appBuild, '')
.replace(/\/?(.*)(\.\w+)(\.js|\.css)/, (match, p1, p2, p3) => p1 + p3);
}
// Input: 1024, 2048
// Output: "(+1 KB)"
function getDifferenceLabel(currentSize, previousSize) {
var FIFTY_KILOBYTES = 1024 * 50;
var difference = currentSize - previousSize;
var fileSize = !Number.isNaN(difference) ? filesize(difference) : 0;
if (difference >= FIFTY_KILOBYTES) {
return chalk.red('+' + fileSize);
} else if (difference < FIFTY_KILOBYTES && difference > 0) {
return chalk.yellow('+' + fileSize);
} else if (difference < 0) {
return chalk.green(fileSize);
} else {
return '';
}
}
// First, read the current file sizes in build directory.
// This lets us display how much they changed later.
recursive(paths.appBuild, (err, fileNames) => {
var previousSizeMap = (fileNames || [])
.filter(fileName => /\.(js|css)$/.test(fileName))
.reduce((memo, fileName) => {
var contents = fs.readFileSync(fileName);
var key = removeFileNameHash(fileName);
memo[key] = gzipSize(contents);
return memo;
}, {});
// Remove all content but keep the directory so that
// if you're in it, you don't end up in Trash
fs.emptyDirSync(paths.appBuild);
// Start the webpack build
build(previousSizeMap);
// Merge with the public folder
copyPublicFolder();
});
// Print a detailed summary of build files.
function printFileSizes(stats, previousSizeMap) {
var assets = stats.toJson().assets
.filter(asset => /\.(js|css)$/.test(asset.name))
.map(asset => {
var fileContents = fs.readFileSync(paths.appBuild + '/' + asset.name);
var size = gzipSize(fileContents);
var previousSize = previousSizeMap[removeFileNameHash(asset.name)];
var difference = getDifferenceLabel(size, previousSize);
return {
folder: path.join('build', path.dirname(asset.name)),
name: path.basename(asset.name),
size: size,
sizeLabel: filesize(size) + (difference ? ' (' + difference + ')' : '')
};
});
assets.sort((a, b) => b.size - a.size);
var longestSizeLabelLength = Math.max.apply(null,
assets.map(a => stripAnsi(a.sizeLabel).length)
);
assets.forEach(asset => {
var sizeLabel = asset.sizeLabel;
var sizeLength = stripAnsi(sizeLabel).length;
if (sizeLength < longestSizeLabelLength) {
var rightPadding = ' '.repeat(longestSizeLabelLength - sizeLength);
sizeLabel += rightPadding;
}
console.log(
' ' + sizeLabel +
' ' + chalk.dim(asset.folder + path.sep) + chalk.cyan(asset.name)
);
});
}
// Print out errors
function printErrors(summary, errors) {
console.log(chalk.red(summary));
console.log();
errors.forEach(err => {
console.log(err.message || err);
console.log();
});
}
// Create the production build and print the deployment instructions.
function build(previousSizeMap) {
console.log('Creating an optimized production build...');
webpack(config).run((err, stats) => {
if (err) {
printErrors('Failed to compile.', [err]);
process.exit(1);
}
if (stats.compilation.errors.length) {
printErrors('Failed to compile.', stats.compilation.errors);
process.exit(1);
}
if (process.env.CI && stats.compilation.warnings.length) {
printErrors('Failed to compile.', stats.compilation.warnings);
process.exit(1);
}
console.log(chalk.green('Compiled successfully.'));
console.log();
console.log('File sizes after gzip:');
console.log();
printFileSizes(stats, previousSizeMap);
console.log();
var openCommand = process.platform === 'win32' ? 'start' : 'open';
var appPackage = require(paths.appPackageJson);
var homepagePath = appPackage.homepage;
var publicPath = config.output.publicPath;
if (homepagePath && homepagePath.indexOf('.github.io/') !== -1) {
// "homepage": "http://user.github.io/project"
console.log('The project was built assuming it is hosted at ' + chalk.green(publicPath) + '.');
console.log('You can control this with the ' + chalk.green('homepage') + ' field in your ' + chalk.cyan('package.json') + '.');
console.log();
console.log('The ' + chalk.cyan('build') + ' folder is ready to be deployed.');
console.log('To publish it at ' + chalk.green(homepagePath) + ', run:');
// If script deploy has been added to package.json, skip the instructions
if (typeof appPackage.scripts.deploy === 'undefined') {
console.log();
if (useYarn) {
console.log(' ' + chalk.cyan('yarn') + ' add --dev gh-pages');
} else {
console.log(' ' + chalk.cyan('npm') + ' install --save-dev gh-pages');
}
console.log();
console.log('Add the following script in your ' + chalk.cyan('package.json') + '.');
console.log();
console.log(' ' + chalk.dim('// ...'));
console.log(' ' + chalk.yellow('"scripts"') + ': {');
console.log(' ' + chalk.dim('// ...'));
console.log(' ' + chalk.yellow('"deploy"') + ': ' + chalk.yellow('"npm run build&&gh-pages -d build"'));
console.log(' }');
console.log();
console.log('Then run:');
}
console.log();
console.log(' ' + chalk.cyan(useYarn ? 'yarn' : 'npm') + ' run deploy');
console.log();
} else if (publicPath !== '/') {
// "homepage": "http://mywebsite.com/project"
console.log('The project was built assuming it is hosted at ' + chalk.green(publicPath) + '.');
console.log('You can control this with the ' + chalk.green('homepage') + ' field in your ' + chalk.cyan('package.json') + '.');
console.log();
console.log('The ' + chalk.cyan('build') + ' folder is ready to be deployed.');
console.log();
} else {
// no homepage or "homepage": "http://mywebsite.com"
console.log('The project was built assuming it is hosted at the server root.');
if (homepagePath) {
// "homepage": "http://mywebsite.com"
console.log('You can control this with the ' + chalk.green('homepage') + ' field in your ' + chalk.cyan('package.json') + '.');
console.log();
} else {
// no homepage
console.log('To override this, specify the ' + chalk.green('homepage') + ' in your ' + chalk.cyan('package.json') + '.');
console.log('For example, add this to build it for GitHub Pages:')
console.log();
console.log(' ' + chalk.green('"homepage"') + chalk.cyan(': ') + chalk.green('"http://myname.github.io/myapp"') + chalk.cyan(','));
console.log();
}
console.log('The ' + chalk.cyan('build') + ' folder is ready to be deployed.');
console.log('You may also serve it locally with a static server:')
console.log();
if (useYarn) {
console.log(' ' + chalk.cyan('yarn') + ' global add pushstate-server');
} else {
console.log(' ' + chalk.cyan('npm') + ' install -g pushstate-server');
}
console.log(' ' + chalk.cyan('pushstate-server') + ' build');
console.log(' ' + chalk.cyan(openCommand) + ' http://localhost:9000');
console.log();
}
});
}
function copyPublicFolder() {
fs.copySync(paths.appPublic, paths.appBuild, {
dereference: true,
filter: file => file !== paths.appHtml
});
}


@ -1,315 +0,0 @@
process.env.NODE_ENV = 'development';
// Load environment variables from .env file. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set.
// https://github.com/motdotla/dotenv
require('dotenv').config({silent: true});
var chalk = require('chalk');
var webpack = require('webpack');
var WebpackDevServer = require('webpack-dev-server');
var historyApiFallback = require('connect-history-api-fallback');
var httpProxyMiddleware = require('http-proxy-middleware');
var detect = require('detect-port');
var clearConsole = require('react-dev-utils/clearConsole');
var checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
var formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
var getProcessForPort = require('react-dev-utils/getProcessForPort');
var openBrowser = require('react-dev-utils/openBrowser');
var prompt = require('react-dev-utils/prompt');
var pathExists = require('path-exists');
var config = require('../config/webpack.config.dev');
var paths = require('../config/paths');
var useYarn = pathExists.sync(paths.yarnLockFile);
var cli = useYarn ? 'yarn' : 'npm';
var isInteractive = process.stdout.isTTY;
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
// Tools like Cloud9 rely on this.
var DEFAULT_PORT = process.env.PORT || 3000;
var compiler;
var handleCompile;
// You can safely remove this after ejecting.
// We only use this block for testing of Create React App itself:
var isSmokeTest = process.argv.some(arg => arg.indexOf('--smoke-test') > -1);
if (isSmokeTest) {
handleCompile = function (err, stats) {
if (err || stats.hasErrors() || stats.hasWarnings()) {
process.exit(1);
} else {
process.exit(0);
}
};
}
function setupCompiler(host, port, protocol) {
// "Compiler" is a low-level interface to Webpack.
// It lets us listen to some events and provide our own custom messages.
compiler = webpack(config, handleCompile);
// "invalid" event fires when you have changed a file, and Webpack is
// recompiling a bundle. WebpackDevServer takes care to pause serving the
// bundle, so if you refresh, it'll wait instead of serving the old one.
// "invalid" is short for "bundle invalidated", it doesn't imply any errors.
compiler.plugin('invalid', function() {
if (isInteractive) {
clearConsole();
}
console.log('Compiling...');
});
var isFirstCompile = true;
// "done" event fires when Webpack has finished recompiling the bundle.
// Whether or not you have warnings or errors, you will get this event.
compiler.plugin('done', function(stats) {
if (isInteractive) {
clearConsole();
}
// We have switched off the default Webpack output in WebpackDevServer
// options so we are going to "massage" the warnings and errors and present
// them in a readable focused way.
var messages = formatWebpackMessages(stats.toJson({}, true));
var isSuccessful = !messages.errors.length && !messages.warnings.length;
var showInstructions = isSuccessful && (isInteractive || isFirstCompile);
if (isSuccessful) {
console.log(chalk.green('Compiled successfully!'));
}
if (showInstructions) {
console.log();
console.log('The app is running at:');
console.log();
console.log(' ' + chalk.cyan(protocol + '://' + host + ':' + port + '/'));
console.log();
console.log('Note that the development build is not optimized.');
console.log('To create a production build, use ' + chalk.cyan(cli + ' run build') + '.');
console.log();
isFirstCompile = false;
}
// If errors exist, only show errors.
if (messages.errors.length) {
console.log(chalk.red('Failed to compile.'));
console.log();
messages.errors.forEach(message => {
console.log(message);
console.log();
});
return;
}
// Show warnings if no errors were found.
if (messages.warnings.length) {
console.log(chalk.yellow('Compiled with warnings.'));
console.log();
messages.warnings.forEach(message => {
console.log(message);
console.log();
});
// Teach some ESLint tricks.
console.log('You may use special comments to disable some warnings.');
console.log('Use ' + chalk.yellow('// eslint-disable-next-line') + ' to ignore the next line.');
console.log('Use ' + chalk.yellow('/* eslint-disable */') + ' to ignore all warnings in a file.');
}
});
}
// We need to provide a custom onError function for httpProxyMiddleware.
// It allows us to log custom error messages on the console.
function onProxyError(proxy) {
return function(err, req, res){
var host = req.headers && req.headers.host;
console.log(
chalk.red('Proxy error:') + ' Could not proxy request ' + chalk.cyan(req.url) +
' from ' + chalk.cyan(host) + ' to ' + chalk.cyan(proxy) + '.'
);
console.log(
'See https://nodejs.org/api/errors.html#errors_common_system_errors for more information (' +
chalk.cyan(err.code) + ').'
);
console.log();
// And immediately send the proper error response to the client.
// Otherwise, the request will eventually timeout with ERR_EMPTY_RESPONSE on the client side.
if (res.writeHead && !res.headersSent) {
res.writeHead(500);
}
res.end('Proxy error: Could not proxy request ' + req.url + ' from ' +
host + ' to ' + proxy + ' (' + err.code + ').'
);
}
}
function addMiddleware(devServer) {
// `proxy` lets you specify a fallback server during development.
// Every unrecognized request will be forwarded to it.
var proxy = require(paths.appPackageJson).proxy;
devServer.use(historyApiFallback({
// Paths with dots should still use the history fallback.
// See https://github.com/facebookincubator/create-react-app/issues/387.
disableDotRule: true,
// For single page apps, we generally want to fallback to /index.html.
// However we also want to respect `proxy` for API calls.
// So if `proxy` is specified, we need to decide which fallback to use.
// We use a heuristic: if request `accept`s text/html, we pick /index.html.
// Modern browsers include text/html in the `accept` header when navigating.
// However, API calls like `fetch()` won't generally accept text/html.
// If this heuristic doesn't work well for you, don't use `proxy`.
htmlAcceptHeaders: proxy ?
['text/html'] :
['text/html', '*/*']
}));
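// For illustration only (not part of the original script): with `proxy` set,
// a browser navigation such as
//   GET /torrent/abc        Accept: text/html,application/xhtml+xml,...
// falls back to /index.html, while an API call such as
//   fetch('/api/stats')     Accept: */*
// is forwarded to the proxy target, because only 'text/html' is matched above.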
if (proxy) {
if (typeof proxy !== 'string') {
console.log(chalk.red('When specified, "proxy" in package.json must be a string.'));
console.log(chalk.red('Instead, the type of "proxy" was "' + typeof proxy + '".'));
console.log(chalk.red('Either remove "proxy" from package.json, or make it a string.'));
process.exit(1);
}
// Otherwise, if proxy is specified, we will let it handle any request.
// There are a few exceptions which we won't send to the proxy:
// - /index.html (served as HTML5 history API fallback)
// - /*.hot-update.json (WebpackDevServer uses this too for hot reloading)
// - /sockjs-node/* (WebpackDevServer uses this for hot reloading)
// Tip: use https://jex.im/regulex/ to visualize the regex
var mayProxy = /^(?!\/(index\.html$|.*\.hot-update\.json$|sockjs-node\/)).*$/;
// Pass the scope regex both to Express and to the middleware for proxying
// of both HTTP and WebSockets to work without false positives.
var hpm = httpProxyMiddleware(pathname => mayProxy.test(pathname), {
target: proxy,
logLevel: 'silent',
onProxyReq: function(proxyReq, req, res) {
// Browsers may send Origin headers even with same-origin
// requests. To prevent CORS issues, we have to change
// the Origin to match the target URL.
if (proxyReq.getHeader('origin')) {
proxyReq.setHeader('origin', proxy);
}
},
onError: onProxyError(proxy),
secure: false,
changeOrigin: true,
ws: true
});
devServer.use(mayProxy, hpm);
// Listen for the websocket 'upgrade' event and upgrade the connection.
// If this is not done, httpProxyMiddleware will not try to upgrade until
// an initial plain HTTP request is made.
devServer.listeningApp.on('upgrade', hpm.upgrade);
}
// Finally, by now we have certainly resolved the URL.
// It may be /index.html, so let the dev server try serving it again.
devServer.use(devServer.middleware);
}
function runDevServer(host, port, protocol) {
var devServer = new WebpackDevServer(compiler, {
// Enable gzip compression of generated files.
compress: true,
// Silence WebpackDevServer's own logs since they're generally not useful.
// It will still show compile warnings and errors with this setting.
clientLogLevel: 'none',
// By default WebpackDevServer serves physical files from current directory
// in addition to all the virtual build products that it serves from memory.
// This is confusing because those files won't automatically be available in
// production build folder unless we copy them. However, copying the whole
// project directory is dangerous because we may expose sensitive files.
// Instead, we establish a convention that only files in `public` directory
// get served. Our build script will copy `public` into the `build` folder.
// In `index.html`, you can get URL of `public` folder with %PUBLIC_PATH%:
// <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
// In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
// Note that we only recommend using the `public` folder as an escape hatch
// for files like `favicon.ico`, `manifest.json`, and libraries that are
// for some reason broken when imported through Webpack. If you just want to
// use an image, put it in `src` and `import` it from JavaScript instead.
contentBase: paths.appPublic,
// Enable hot reloading server. It will provide /sockjs-node/ endpoint
// for the WebpackDevServer client so it can learn when the files were
// updated. The WebpackDevServer client is included as an entry point
// in the Webpack development configuration. Note that only changes
// to CSS are currently hot reloaded. JS changes will refresh the browser.
hot: true,
// It is important to tell WebpackDevServer to use the same "root" path
// as we specified in the config. In development, we always serve from /.
publicPath: config.output.publicPath,
// WebpackDevServer is noisy by default so we emit custom message instead
// by listening to the compiler events with `compiler.plugin` calls above.
quiet: true,
// Reportedly, this avoids CPU overload on some systems.
// https://github.com/facebookincubator/create-react-app/issues/293
watchOptions: {
ignored: /node_modules/
},
// Enable HTTPS if the HTTPS environment variable is set to 'true'
https: protocol === "https",
host: host
});
// Our custom middleware proxies requests to /index.html or a remote API.
addMiddleware(devServer);
// Launch WebpackDevServer.
devServer.listen(port, (err, result) => {
if (err) {
return console.log(err);
}
if (isInteractive) {
clearConsole();
}
console.log(chalk.cyan('Starting the development server...'));
console.log();
if (isInteractive) {
openBrowser(protocol + '://' + host + ':' + port + '/');
}
});
}
function run(port) {
var protocol = process.env.HTTPS === 'true' ? "https" : "http";
var host = process.env.HOST || 'localhost';
setupCompiler(host, port, protocol);
runDevServer(host, port, protocol);
}
// We attempt to use the default port but if it is busy, we offer the user to
// run on a different port. `detect()` Promise resolves to the next free port.
detect(DEFAULT_PORT).then(port => {
if (port === DEFAULT_PORT) {
run(port);
return;
}
if (isInteractive) {
clearConsole();
var existingProcess = getProcessForPort(DEFAULT_PORT);
var question =
chalk.yellow('Something is already running on port ' + DEFAULT_PORT + '.' +
((existingProcess) ? ' Probably:\n ' + existingProcess : '')) +
'\n\nWould you like to run the app on another port instead?';
prompt(question, true).then(shouldChangePort => {
if (shouldChangePort) {
run(port);
}
});
} else {
console.log(chalk.red('Something is already running on port ' + DEFAULT_PORT + '.'));
}
});

View File

@ -1,31 +0,0 @@
process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';
// Load environment variables from .env file. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set.
// https://github.com/motdotla/dotenv
require('dotenv').config({silent: true});
const jest = require('jest');
const argv = process.argv.slice(2);
// Watch unless on CI or in coverage mode
if (!process.env.CI && argv.indexOf('--coverage') < 0) {
argv.push('--watch');
}
// A temporary hack to clear terminal correctly.
// You can remove this after updating to Jest 18 when it's out.
// https://github.com/facebook/jest/pull/2230
var realWrite = process.stdout.write;
var CLEAR = process.platform === 'win32' ? '\x1Bc' : '\x1B[2J\x1B[3J\x1B[H';
process.stdout.write = function(chunk, encoding, callback) {
if (chunk === '\x1B[2J\x1B[H') {
chunk = CLEAR;
}
return realWrite.call(this, chunk, encoding, callback);
};
jest.run(argv);

View File

@ -1,147 +0,0 @@
#
# Minimal Sphinx configuration sample (clean, simple, functional)
#
source files_index
{
type = mysql
sql_host = localhost
sql_user = btsearch
sql_pass = pirateal100x
sql_db = btsearch
sql_port = 3306 # optional, default is 3306
sql_query_pre = SET NAMES utf8
sql_query_pre = REPLACE INTO sphinx_counter SELECT 1, MAX(fileid) FROM files
sql_query = \
SELECT fls.fileid, fls.path, fls.size as filesize, \
torrents.hash as hash, torrents.name as name, torrents.size as size, \
torrents.seeders, torrents.leechers, torrents.completed, \
torrents.files, torrents.contentType, torrents.contentCategory, \
torrents.good, torrents.bad, UNIX_TIMESTAMP(torrents.added) AS added \
FROM files as fls INNER JOIN torrents ON(torrents.hash = fls.hash)
sql_field_string = path
sql_attr_string = hash
sql_attr_bigint = filesize
sql_attr_string = name
sql_attr_bigint = size
sql_attr_uint = files
sql_attr_uint = seeders
sql_attr_uint = leechers
sql_attr_uint = completed
sql_attr_string = contentType
sql_attr_string = contentCategory
sql_attr_uint = good
sql_attr_uint = bad
sql_attr_timestamp = added
}
index files_index
{
source = files_index
path = /var/lib/sphinx/files
min_word_len = 3
}
source files_index_delta : files_index
{
sql_query_pre = SET NAMES utf8
sql_query = \
SELECT fls.fileid, fls.path, fls.size as filesize, \
torrents.hash as hash, torrents.name as name, torrents.size as size, \
torrents.seeders, torrents.leechers, torrents.completed, \
torrents.files, torrents.contentType, torrents.contentCategory, \
torrents.good, torrents.bad, UNIX_TIMESTAMP(torrents.added) AS added \
FROM files as fls INNER JOIN torrents ON(torrents.hash = fls.hash) \
WHERE fls.fileid > ( SELECT max_doc_id FROM sphinx_counter WHERE counter_id = 1 )
}
index files_index_delta : files_index
{
source = files_index_delta
path = /var/lib/sphinx/files.delta
}
source torrents_index
{
type = mysql
sql_host = localhost
sql_user = btsearch
sql_pass = pirateal100x
sql_db = btsearch
sql_port = 3306 # optional, default is 3306
sql_query_pre = SET NAMES utf8
sql_query_pre = REPLACE INTO sphinx_counter SELECT 2, MAX(torrentid) FROM torrents
sql_query = \
SELECT torrents.torrentid, torrents.hash as hash, torrents.name as name, torrents.size as size, \
torrents.seeders, torrents.leechers, torrents.completed, \
torrents.files, torrents.contentType, torrents.contentCategory, \
torrents.good, torrents.bad, UNIX_TIMESTAMP(torrents.added) AS added \
FROM torrents
sql_attr_string = hash
sql_field_string = name
sql_attr_bigint = size
sql_attr_uint = files
sql_attr_uint = seeders
sql_attr_uint = leechers
sql_attr_uint = completed
sql_attr_string = contentType
sql_attr_string = contentCategory
sql_attr_uint = good
sql_attr_uint = bad
sql_attr_timestamp = added
}
index torrents_index
{
source = torrents_index
path = /var/lib/sphinx/torrents
min_word_len = 3
}
source torrents_index_delta : torrents_index
{
sql_query_pre = SET NAMES utf8
sql_query = \
SELECT torrents.torrentid, torrents.hash as hash, torrents.name as name, torrents.size as size, \
torrents.seeders, torrents.leechers, torrents.completed, \
torrents.files, torrents.contentType, torrents.contentCategory, \
torrents.good, torrents.bad, UNIX_TIMESTAMP(torrents.added) AS added \
FROM torrents \
WHERE torrents.torrentid > ( SELECT max_doc_id FROM sphinx_counter WHERE counter_id = 2 )
}
index torrents_index_delta : torrents_index
{
source = torrents_index_delta
path = /var/lib/sphinx/torrents.delta
}
indexer
{
mem_limit = 128M
}
searchd
{
listen = 9312
listen = 9306:mysql41
log = /var/log/sphinx/searchd.log
query_log = /var/log/sphinx/query.log
read_timeout = 5
max_children = 30
pid_file = /var/run/sphinx/searchd.pid
seamless_rotate = 1
preopen_indexes = 1
unlink_old = 1
workers = threads # for RT to work
binlog_path = /var/lib/sphinx/
}
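# Note on the delta scheme above: this is the stock Sphinx "main + delta" pattern.
# The sql_query_pre REPLACE INTO sphinx_counter statements on the main sources
# record the highest indexed id at full-build time, so the *_delta sources only
# pick up rows added since then. A cron job would typically refresh the deltas
# with something like
#   indexer --config /etc/sphinx/sphinx.conf files_index_delta torrents_index_delta --rotate
# (config path illustrative), rebuilding the main indexes far less often.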

View File

@ -1,55 +0,0 @@
import React, { Component } from 'react';
import './app.css';
import './router';
import PagesPie from './pages-pie.js';
import registerServiceWorker from './registerServiceWorker';
import injectTapEventPlugin from 'react-tap-event-plugin';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
var io = require("socket.io-client");
window.torrentSocket = io(document.location.protocol + '//' + document.location.hostname + (process.env.NODE_ENV === 'production' ? '/' : ':8095/'));
// Needed for onTouchTap
// http://stackoverflow.com/a/34015469/988941
injectTapEventPlugin();
registerServiceWorker();
let loadersCount = 0;
let appReady = false;
window.customLoader = (func, onLoading, onLoaded) => {
loadersCount++;
if(onLoading) {
onLoading();
}
return (...args) => {
func(...args);
if(onLoaded) {
onLoaded();
}
loadersCount--;
}
};
window.isReady = () => {
return (appReady && loadersCount === 0)
}
class App extends Component {
componentDidMount() {
window.router()
appReady = true;
}
componentWillUnmount() {
appReady = false;
}
render() {
return (
<MuiThemeProvider>
<PagesPie />
</MuiThemeProvider>
);
}
}
export default App;

86
src/app/app.js Normal file
View File

@ -0,0 +1,86 @@
import React, { Component } from 'react';
import './app.css';
import './router';
import PagesPie from './pages-pie.js';
//import registerServiceWorker from './registerServiceWorker';
import injectTapEventPlugin from 'react-tap-event-plugin';
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider';
const { ipcRenderer, remote } = require('electron');
//var io = require("socket.io-client");
//window.torrentSocket = io(document.location.protocol + '//' + document.location.hostname + (process.env.NODE_ENV === 'production' ? '/' : ':8095/'));
window.torrentSocket = {}
window.torrentSocket.callbacks = {}
window.torrentSocket.on = (name, func) => {
// remember the ipc wrapper so off() can detach the exact same listener later
func.__ipcWrapper = (event, data) => {
func(data)
};
ipcRenderer.on(name, func.__ipcWrapper);
}
window.torrentSocket.off = (name, func) => {
if(!func)
ipcRenderer.removeAllListeners(name);
else
ipcRenderer.removeListener(name, func.__ipcWrapper || func);
}
window.torrentSocket.emit = (name, ...data) => {
if(typeof data[data.length - 1] === 'function')
{
const id = Math.random().toString(36).substring(5)
window.torrentSocket.callbacks[id] = data[data.length - 1];
data[data.length - 1] = {callback: id}
}
ipcRenderer.send(name, data)
}
ipcRenderer.on('callback', (event, id, data) => {
const callback = window.torrentSocket.callbacks[id]
if(callback)
callback(data)
delete window.torrentSocket.callbacks[id]
});
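// Usage sketch of the shim above (hedged: 'statistic' is one of the channels the
// main process wires up via spiderCall elsewhere in this commit). A trailing
// function is swapped for a generated callback id before the IPC send; the main
// process answers on the 'callback' channel, which resolves it here:
//   window.torrentSocket.emit('statistic', (stats) => {
//     console.log('index holds', stats.torrents, 'torrents')
//   })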
// Needed for onTouchTap
// http://stackoverflow.com/a/34015469/988941
injectTapEventPlugin();
//registerServiceWorker();
let loadersCount = 0;
let appReady = false;
window.customLoader = (func, onLoading, onLoaded) => {
loadersCount++;
if(onLoading) {
onLoading();
}
return (...args) => {
func(...args);
if(onLoaded) {
onLoaded();
}
loadersCount--;
}
};
window.isReady = () => {
return (appReady && loadersCount === 0)
}
class App extends Component {
componentDidMount() {
window.router()
appReady = true;
}
componentWillUnmount() {
appReady = false;
}
render() {
return (
<MuiThemeProvider>
<PagesPie />
</MuiThemeProvider>
);
}
}
export default App;

View File

@ -22,7 +22,6 @@ export default (props) => {
C243.779,80.572,238.768,71.728,220.195,71.427z"/>
</svg>
<iframe data-aa='405459' src='//ad.a-ads.com/405459?size=468x60' scrolling='no' style={{width: '100%', height: '60px', border: '0px', padding: '0', overflow: 'hidden'}} allowTransparency='true'></iframe>
<div className='fs0-75 pad0-75'>Don't hesitate and visit the banners, we are trying to survive among dark blue sea</div>
</div>
)

View File

Binary image file; size unchanged (28 KiB before and after).

View File

Binary image file; size unchanged (218 KiB before and after).

View File

@ -34,7 +34,7 @@ export default class IndexPage extends Page {
}
render() {
return (
<div>
<div id='index-window'>
<Header />
<Search />
<div className='column center w100p pad0-75'>

View File

@ -13,5 +13,5 @@ import './index.css';
ReactDOM.render(
<App />,
document.getElementById('root')
document.getElementById('mount-point')
);

View File

@ -1,6 +1,6 @@
import React, { Component } from 'react';
import InputRange from 'react-input-range';
import 'react-input-range/lib/css/index.css';
import './input-range.css';
import formatBytes from './format-bytes'
import SelectField from 'material-ui/SelectField';

83
src/app/input-range.css Normal file
View File

@ -0,0 +1,83 @@
.input-range__slider {
appearance: none;
background: #3f51b5;
border: 1px solid #3f51b5;
border-radius: 100%;
cursor: pointer;
display: block;
height: 1rem;
margin-left: -0.5rem;
margin-top: -0.65rem;
outline: none;
position: absolute;
top: 50%;
transition: transform 0.3s ease-out, box-shadow 0.3s ease-out;
width: 1rem; }
.input-range__slider:active {
transform: scale(1.3); }
.input-range__slider:focus {
box-shadow: 0 0 0 5px rgba(63, 81, 181, 0.2); }
.input-range--disabled .input-range__slider {
background: #cccccc;
border: 1px solid #cccccc;
box-shadow: none;
transform: none; }
.input-range__slider-container {
transition: left 0.3s ease-out; }
.input-range__label {
color: #aaaaaa;
font-family: "Helvetica Neue", san-serif;
font-size: 0.8rem;
transform: translateZ(0);
white-space: nowrap; }
.input-range__label--min,
.input-range__label--max {
bottom: -1.4rem;
position: absolute; }
.input-range__label--min {
left: 0; }
.input-range__label--max {
right: 0; }
.input-range__label--value {
position: absolute;
top: -1.8rem; }
.input-range__label-container {
left: -50%;
position: relative; }
.input-range__label--max .input-range__label-container {
left: 50%; }
.input-range__track {
background: #eeeeee;
border-radius: 0.3rem;
cursor: pointer;
display: block;
height: 0.3rem;
position: relative;
transition: left 0.3s ease-out, width 0.3s ease-out; }
.input-range--disabled .input-range__track {
background: #eeeeee; }
.input-range__track--background {
left: 0;
margin-top: -0.15rem;
position: absolute;
right: 0;
top: 50%; }
.input-range__track--active {
background: #3f51b5; }
.input-range {
height: 1rem;
position: relative;
width: 100%; }
/*# sourceMappingURL=input-range.css.map */

View File

@ -1,6 +1,6 @@
import React, { Component } from 'react';
import InputRange from 'react-input-range';
import 'react-input-range/lib/css/index.css';
import './input-range.css';
import formatBytes from './format-bytes'
import SelectField from 'material-ui/SelectField';

View File

@ -1,5 +1,4 @@
import router from 'page';
window.router = router;
//import router from 'page';
import PagesPie from './pages-pie.js';
import IndexPage from './index-page.js'
@ -8,6 +7,48 @@ import DMCAPage from './dmca-page.js'
import AdminPage from './admin-page.js'
import TopPage from './top-page.js'
let routers = {}
const router = (page, callback) => {
if(!callback)
{
if(!page)
routers['/'].callback()
else
{
const p = page.split('/')
const pg = routers[`${p[0]}/${p[1]}`]
if(!pg)
return
p.splice(0, 2)
const params = {}
for(let i = 0; i < p.length; i++)
{
params[pg.args[i]] = p[i]
}
console.log(params)
pg.callback({
params
})
}
return;
}
const p = page.split('/')
routers[`${p[0]}/${p[1]}`] = {callback}
routers[`${p[0]}/${p[1]}`].args = []
for(let i = 2; i < p.length; i++)
{
if(p[i].startsWith(':'))
routers[`${p[0]}/${p[1]}`].args.push(p[i].substring(1))
}
}
window.router = router;
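// Usage sketch for the ad-hoc router above (hash value illustrative): the first
// two path segments form the route key and ':'-prefixed segments become params.
//   router('/torrent/:hash', ({ params }) => console.log('open torrent', params.hash))
//   // later, navigating programmatically dispatches to that callback:
//   router('/torrent/0123456789abcdef0123456789abcdef01234567')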
router('/', () => {
//singleton
let pie = new PagesPie;

View File

@ -19,6 +19,37 @@ import formatBytes from './format-bytes'
let session;
class TorrentsStatistic extends Component {
constructor(props)
{
super(props)
this.stats = props.stats || {}
}
componentDidMount()
{
this.newTorrentFunc = (torrent) => {
this.stats.size += torrent.size;
this.stats.torrents++;
this.stats.files += torrent.files;
this.forceUpdate()
}
window.torrentSocket.on('newTorrent', this.newTorrentFunc);
}
componentWillUnmount()
{
if(this.newTorrentFunc)
window.torrentSocket.off('newTorrent', this.newTorrentFunc);
}
render()
{
return (
<div className='fs0-75 pad0-75' style={{color: 'rgba(0, 0, 0, 0.541176)'}}>you have information about {this.stats.torrents} torrents and around {this.stats.files} files and { formatBytes(this.stats.size, 1) } of data</div>
)
}
}
export default class Search extends Component {
constructor(props)
{
@ -299,10 +330,8 @@ export default class Search extends Component {
}
{
this.stats
?
<div className='fs0-75 pad0-75' style={{color: 'rgba(0, 0, 0, 0.541176)'}}>we have information about {this.stats.torrents} torrents and around {this.stats.files} files and { formatBytes(this.stats.size, 1) } of data</div>
:
null
&&
<TorrentsStatistic stats={this.stats} />
}
{
this.state.searchingIndicator

View File

@ -314,6 +314,10 @@ export default class TorrentPage extends Page {
target="_self"
label="Download"
secondary={true}
onClick={(e) => {
e.preventDefault();
window.open(`magnet:?xt=urn:btih:${this.torrent.hash}`, '_self')
}}
icon={<svg fill='white' viewBox="0 0 24 24"><path d="M17.374 20.235c2.444-2.981 6.626-8.157 6.626-8.157l-3.846-3.092s-2.857 3.523-6.571 8.097c-4.312 5.312-11.881-2.41-6.671-6.671 4.561-3.729 8.097-6.57 8.097-6.57l-3.092-3.842s-5.173 4.181-8.157 6.621c-2.662 2.175-3.76 4.749-3.76 7.24 0 5.254 4.867 10.139 10.121 10.139 2.487 0 5.064-1.095 7.253-3.765zm4.724-7.953l-1.699 2.111-1.74-1.397 1.701-2.114 1.738 1.4zm-10.386-10.385l1.4 1.738-2.113 1.701-1.397-1.74 2.11-1.699z"/></svg>}
/>
<div className='fs0-75 pad0-75 center column' style={{color: 'rgba(0, 0, 0, 0.541176)'}}><div>BTIH:</div><div>{this.torrent.hash}</div></div>

View File

@ -0,0 +1,311 @@
// This is the main process of Electron, started as the first thing when your
// app starts. It runs through the entire life of your application.
// It doesn't have any window which you can see on screen, but we can open
// windows from here.
import path from "path";
import url from "url";
import { app, Menu, ipcMain, Tray } from "electron";
import { devMenuTemplate } from "./menu/dev_menu_template";
import { editMenuTemplate } from "./menu/edit_menu_template";
import createWindow from "./helpers/window";
// Special module holding environment variables which you declared
// in config/env_xxx.json file.
import env from "env";
import spiderCall from './spider'
const { spawn, exec } = require('child_process')
const fs = require('fs')
const setApplicationMenu = () => {
const menus = [editMenuTemplate];
if (env.name !== "production") {
menus.push(devMenuTemplate);
}
Menu.setApplicationMenu(Menu.buildFromTemplate(menus));
};
// Save userData in separate folders for each environment.
// Thanks to this you can use production and development versions of the app
// on the same machine as if they were two separate apps.
if (env.name !== "production") {
const userDataPath = app.getPath("userData");
app.setPath("userData", `${userDataPath} (${env.name})`);
}
let sphinx = undefined
let spider = undefined
const util = require('util');
if (!fs.existsSync(app.getPath("userData"))){
fs.mkdirSync(app.getPath("userData"));
}
const logFile = fs.createWriteStream(app.getPath("userData") + '/rats.log', {flags : 'w'});
const logStdout = process.stdout;
console.log = (...d) => {
logFile.write(util.format(...d) + '\n');
logStdout.write(util.format(...d) + '\n');
};
const getSphinxPath = () => {
if (fs.existsSync('./searchd')) {
return './searchd'
}
if (/^win/.test(process.platform) && fs.existsSync('./searchd.exe')) {
return './searchd.exe'
}
if (fs.existsSync(fs.realpathSync(__dirname) + '/searchd')) {
return fs.realpathSync(__dirname) + '/searchd'
}
if (fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../..')) + '/searchd')) {
return fs.realpathSync(path.join(__dirname, '/../../..')) + '/searchd'
}
try {
if (process.platform === 'darwin' && fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + '/searchd')) {
return fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + '/searchd'
}
} catch (e) {}
if (/^win/.test(process.platform) && fs.existsSync('imports/win/searchd.exe')) {
return 'imports/win/searchd.exe'
}
if (process.platform === 'linux' && fs.existsSync('imports/linux/searchd')) {
return 'imports/linux/searchd'
}
if (process.platform === 'darwin' && fs.existsSync('imports/darwin/searchd')) {
return 'imports/darwin/searchd'
}
return 'searchd'
}
const writeSphinxConfig = (path) => {
const config = `
index torrents
{
type = rt
path = ${path}/database/torrents
rt_attr_string = hash
rt_attr_string = name
rt_field = nameIndex
rt_attr_bigint = size
rt_attr_uint = files
rt_attr_uint = piecelength
rt_attr_timestamp = added
rt_attr_string = ipv4
rt_attr_uint = port
rt_attr_string = contentType
rt_attr_string = contentCategory
rt_attr_uint = seeders
rt_attr_uint = leechers
rt_attr_uint = completed
rt_attr_timestamp = trackersChecked
rt_attr_uint = good
rt_attr_uint = bad
}
index files
{
type = rt
path = ${path}/database/files
rt_attr_string = path
rt_field = pathIndex
rt_attr_string = hash
rt_attr_bigint = size
}
index statistic
{
type = rt
path = ${path}/database/statistic
rt_attr_bigint = size
rt_attr_bigint = files
rt_attr_uint = torrents
}
searchd
{
listen = 9312
listen = 9306:mysql41
read_timeout = 5
max_children = 30
seamless_rotate = 1
preopen_indexes = 1
unlink_old = 1
workers = threads # for RT to work
pid_file = ${path}/searchd.pid
log = ${path}/searchd.log
query_log = ${path}/query.log
binlog_path = ${path}
}
`;
if (!fs.existsSync(`${path}/database`)){
fs.mkdirSync(`${path}/database`);
}
fs.writeFileSync(`${path}/sphinx.conf`, config)
console.log(`wrote sphinx config to ${path}`)
}
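// The RT indexes written above are filled and queried over the SphinxQL listener
// (MySQL wire protocol, port 9306 per the config). A minimal sketch using the same
// `mysql` package the spider relies on; the query text is illustrative only:
//   const sphinxql = require('mysql').createConnection({ host: '127.0.0.1', port: 9306 })
//   sphinxql.query("SELECT * FROM torrents WHERE MATCH('ubuntu') LIMIT 10", (err, rows) => {
//     if(!err) console.log(rows)
//     sphinxql.end()
//   })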
const sphinxPath = path.resolve(getSphinxPath())
console.log('Sphinx Path:', sphinxPath)
let closerPath = sphinxPath.replace('searchd', 'consolekill')
if(/^win/.test(process.platform))
{
console.log('windows console closer path: ', closerPath)
console.log('cmd path', process.env.COMSPEC || 'cmd')
}
const startSphinx = (callback) => {
const sphinxConfigDirectory = app.getPath("userData")
writeSphinxConfig(sphinxConfigDirectory)
if(/^win/.test(process.platform))
sphinx = spawn(process.env.COMSPEC || 'cmd', ['/c', sphinxPath, '--config', `${sphinxConfigDirectory}/sphinx.conf`])
else
sphinx = spawn(sphinxPath, ['--config', `${sphinxConfigDirectory}/sphinx.conf`])
sphinx.stdout.on('data', (data) => {
console.log(`sphinx: ${data}`)
if (data.includes('accepting connections')) {
console.log('caught sphinx start')
if(callback)
callback()
}
})
sphinx.on('close', (code, signal) => {
console.log(`sphinx closed with code ${code} and signal ${signal}`)
app.quit()
})
}
let tray = undefined
app.on("ready", () => {
startSphinx(() => {
setApplicationMenu();
const mainWindow = createWindow("main", {
width: 1000,
height: 600
});
mainWindow.loadURL(
url.format({
pathname: path.join(__dirname, "app.html"),
protocol: "file:",
slashes: true
})
);
if (env.name === "development") {
mainWindow.openDevTools();
}
tray = new Tray('resources/icons/512x512.png')
tray.on('click', () => {
mainWindow.isVisible() ? mainWindow.hide() : mainWindow.show()
})
mainWindow.on('show', () => {
tray.setHighlightMode('always')
})
mainWindow.on('hide', () => {
tray.setHighlightMode('never')
})
mainWindow.on('minimize', (event) => {
event.preventDefault();
mainWindow.hide();
});
var contextMenu = Menu.buildFromTemplate([
{ label: 'Show', click: function(){
mainWindow.show();
} },
{ label: 'Quit', click: function(){
app.isQuiting = true;
if (sphinx)
stop()
else
app.quit()
} }
]);
tray.setContextMenu(contextMenu)
tray.setToolTip('Rats on The Boat search')
mainWindow.webContents.on('will-navigate', e => { e.preventDefault() })
mainWindow.webContents.on('new-window', (event, url, frameName) => {
if(frameName == '_self')
{
event.preventDefault()
mainWindow.loadURL(url)
}
})
spider = spiderCall((...data) => mainWindow.webContents.send(...data), (message, callback) => {
ipcMain.on(message, (event, arg) => {
if(Array.isArray(arg) && typeof arg[arg.length - 1] === 'object' && arg[arg.length - 1].callback)
{
const id = arg[arg.length - 1].callback
arg[arg.length - 1] = (response) => {
mainWindow.webContents.send('callback', id, response)
}
}
callback.apply(null, arg)
})
})
})
});
let stopProtect = false
const stop = () => {
if(stopProtect)
return
stopProtect = true
if(tray)
tray.destroy()
if(spider)
{
if(/^win/.test(process.platform))
spider.stop(() => exec(`${closerPath} ${sphinx.pid}`))
else
spider.stop(() => sphinx.kill())
}
else
{
if(/^win/.test(process.platform))
exec(`${closerPath} ${sphinx.pid}`)
else
sphinx.kill()
}
}
app.on("window-all-closed", () => {
if (sphinx)
stop()
else
app.quit()
});
app.on('before-quit', () => {
if (sphinx)
stop()
})

View File

@ -78,6 +78,9 @@ class Spider extends Emiter {
}
walk() {
if(this.closing)
return
if(!this.client || this.client.isIdle()) {
if(
!this.ignore
@ -230,7 +233,7 @@ class Spider extends Emiter {
this.parse(data, addr)
})
this.udp.on('error', (err) => {})
setInterval(() => {
this.joinInterval = setInterval(() => {
if(!this.client || this.client.isIdle()) {
this.join()
}
@ -246,7 +249,7 @@ class Spider extends Emiter {
{
trafficDebug('limitation', config.trafficMax / 1024, 'kbps/s')
let traffic = 0
setInterval(() => {
this.trafficInterval = setInterval(() => {
fs.readFile(path, (err, newTraffic) => {
if(err)
return
@ -264,6 +267,19 @@ class Spider extends Emiter {
}
}
}
close(callback)
{
clearInterval(this.joinInterval)
if(this.trafficInterval)
clearInterval(this.trafficInterval)
this.closing = true
this.udp.close(() => {
this.initialized = false
if(callback)
callback()
})
}
}
module.exports = Spider

View File

@ -1,5 +1,5 @@
let config = {
indexer: false,
indexer: true,
domain: 'ratsontheboat.org',
httpPort: 8095,

View File

@ -0,0 +1,3 @@
{
"indexer": true
}

View File

@ -0,0 +1,84 @@
// This helper remembers the size and position of your windows (and restores
// them in that place after app relaunch).
// Can be used for more than one window, just construct many
// instances of it and give each a different name.
import { app, BrowserWindow, screen } from "electron";
import jetpack from "fs-jetpack";
export default (name, options) => {
const userDataDir = jetpack.cwd(app.getPath("userData"));
const stateStoreFile = `window-state-${name}.json`;
const defaultSize = {
width: options.width,
height: options.height
};
let state = {};
let win;
const restore = () => {
let restoredState = {};
try {
restoredState = userDataDir.read(stateStoreFile, "json");
} catch (err) {
// For some reason json can't be read (might be corrupted).
// No worries, we have defaults.
}
return Object.assign({}, defaultSize, restoredState);
};
const getCurrentPosition = () => {
const position = win.getPosition();
const size = win.getSize();
return {
x: position[0],
y: position[1],
width: size[0],
height: size[1]
};
};
const windowWithinBounds = (windowState, bounds) => {
return (
windowState.x >= bounds.x &&
windowState.y >= bounds.y &&
windowState.x + windowState.width <= bounds.x + bounds.width &&
windowState.y + windowState.height <= bounds.y + bounds.height
);
};
const resetToDefaults = () => {
const bounds = screen.getPrimaryDisplay().bounds;
return Object.assign({}, defaultSize, {
x: (bounds.width - defaultSize.width) / 2,
y: (bounds.height - defaultSize.height) / 2
});
};
const ensureVisibleOnSomeDisplay = windowState => {
const visible = screen.getAllDisplays().some(display => {
return windowWithinBounds(windowState, display.bounds);
});
if (!visible) {
// Window is partially or fully not visible now.
// Reset it to safe defaults.
return resetToDefaults();
}
return windowState;
};
const saveState = () => {
if (!win.isMinimized() && !win.isMaximized()) {
Object.assign(state, getCurrentPosition());
}
userDataDir.write(stateStoreFile, state, { atomic: true });
};
state = ensureVisibleOnSomeDisplay(restore());
win = new BrowserWindow(Object.assign({}, options, state));
win.on("close", saveState);
return win;
};
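// Usage sketch (background.js in this commit calls it exactly this way); the
// state file name follows the `window-state-${name}.json` pattern above, and the
// contents below are illustrative:
//   const mainWindow = createWindow("main", { width: 1000, height: 600 });
// On 'close' it writes e.g. userData/window-state-main.json:
//   { "x": 120, "y": 80, "width": 1000, "height": 600 }
// and on the next launch restore() merges that over the defaults, while
// ensureVisibleOnSomeDisplay() falls back to centered defaults if the saved
// rectangle no longer fits any connected display.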

View File

@ -0,0 +1,28 @@
import { app, BrowserWindow } from "electron";
export const devMenuTemplate = {
label: "Development",
submenu: [
{
label: "Reload",
accelerator: "CmdOrCtrl+R",
click: () => {
BrowserWindow.getFocusedWindow().webContents.reloadIgnoringCache();
}
},
{
label: "Toggle DevTools",
accelerator: "Alt+CmdOrCtrl+I",
click: () => {
BrowserWindow.getFocusedWindow().toggleDevTools();
}
},
{
label: "Quit",
accelerator: "CmdOrCtrl+Q",
click: () => {
app.quit();
}
}
]
};

View File

@ -0,0 +1,12 @@
export const editMenuTemplate = {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]
};

View File

@ -4,14 +4,14 @@ const spider = new (require('./bt/spider'))(client)
const mysql = require('mysql');
const getPeersStatisticUDP = require('./bt/udp-tracker-request')
var express = require('express');
var app = express();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var sm = require('sitemap');
var phantomjs = require('phantomjs-prebuilt')
//var express = require('express');
//var app = express();
//var server = require('http').Server(app);
//var io = require('socket.io')(server);
//var sm = require('sitemap');
//var phantomjs = require('phantomjs-prebuilt')
var ipaddr = require('ipaddr.js');
const disk = require('diskusage');
//const disk = require('diskusage');
const os = require('os');
let rootPath = os.platform() === 'win32' ? 'c:' : '/';
@ -21,18 +21,22 @@ const balanceDebug = _debug('main:balance');
const fakeTorrentsDebug = _debug('main:fakeTorrents');
const quotaDebug = _debug('main:quota');
const {torrentTypeDetect} = require('./src/content');
const {torrentTypeDetect} = require('../app/content');
// Start server
server.listen(config.httpPort);
console.log('Listening web server on', config.httpPort, 'port')
//server.listen(config.httpPort);
//console.log('Listening web server on', config.httpPort, 'port')
module.exports = function (send, recive)
{
let torrentsId = 1;
let filesId = 1;
let mysqlPool = mysql.createPool({
connectionLimit: config.mysql.connectionLimit,
host : config.mysql.host,
user : config.mysql.user,
password : config.mysql.password,
database : config.mysql.database
host : config.sphinx.host,
port : config.sphinx.port
});
let sphinx = mysql.createPool({
@ -63,10 +67,8 @@ const udpTrackers = [
let mysqlSingle;
function handleListenerDisconnect() {
mysqlSingle = mysql.createConnection({
host : config.mysql.host,
user : config.mysql.user,
password : config.mysql.password,
database : config.mysql.database
host : config.sphinx.host,
port : config.sphinx.port
});
mysqlSingle.connect(function(mysqlError) {
@ -74,6 +76,22 @@ function handleListenerDisconnect() {
console.error('error connecting: ' + mysqlError.stack);
return;
}
mysqlSingle.query("SELECT MAX(`id`) as mx from torrents", (err, rows) => {
if(err)
return
if(rows[0] && rows[0].mx >= 1)
torrentsId = rows[0].mx + 1;
})
mysqlSingle.query("SELECT MAX(`id`) as mx from files", (err, rows) => {
if(err)
return
if(rows[0] &&rows[0].mx >= 1)
filesId = rows[0].mx + 1;
})
});
mysqlSingle.on('error', function(err) {
@ -113,10 +131,27 @@ function handleListenerDisconnect() {
}
query.apply(mysqlSingle, args)
}
mysqlSingle.insertValues = (table, values, callback) => {
let names = '';
let data = '';
for(const val in values)
{
names += '`' + val + '`,';
data += mysqlSingle.escape(values[val]) + ',';
}
names = names.slice(0, -1)
data = data.slice(0, -1)
let query = `INSERT INTO ${table}(${names}) VALUES(${data})`;
if(callback)
return mysqlSingle.query(query, (...response) => callback(...response))
else
return mysqlSingle.query(query)
}
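// Usage sketch for insertValues above (values illustrative): column names are
// backtick-quoted and values pass through mysqlSingle.escape() before being
// inlined, yielding e.g. INSERT INTO files(`hash`,`path`,...) VALUES(...):
//   mysqlSingle.insertValues('files', {
//     id: filesId++,
//     hash: torrentHash,        // 40-char info hash (hypothetical variable)
//     path: 'dir/file.mkv',
//     pathIndex: 'dir/file.mkv',
//     size: 123456789
//   }, (err, result) => { if(err) console.error(err) })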
}
handleListenerDisconnect();
/*
app.use(express.static('build', {index: false}));
app.get('/sitemap.xml', function(req, res) {
@ -188,6 +223,7 @@ app.get('*', function(req, res)
res.sendfile(__dirname + '/build/index.html');
});
*/
// start
@ -217,9 +253,10 @@ setInterval(() => {
topCache = {};
}, 24 * 60 * 60 * 1000);
io.on('connection', function(socket)
{
socket.on('recentTorrents', function(callback)
//io.on('connection', function(socket)
//{
recive('recentTorrents', function(callback)
{
if(typeof callback != 'function')
return;
@ -239,22 +276,35 @@ io.on('connection', function(socket)
});
});
socket.on('statistic', function(callback)
recive('statistic', function(callback)
{
if(typeof callback != 'function')
return;
mysqlPool.query('SELECT * FROM `statistic`', function (error, rows, fields) {
mysqlPool.query('SELECT count(*) AS torrents, sum(size) AS sz FROM `torrents`', function (error, rows, fields) {
if(!rows) {
console.error(error)
callback(undefined)
return;
}
callback(rows[0])
let result = {torrents: rows[0].torrents || 0, size: rows[0].sz || 0}
mysqlPool.query('SELECT count(*) AS files FROM `files`', function (error, rows, fields) {
if(!rows) {
console.error(error)
callback(undefined)
return;
}
result.files = rows[0].files || 0
callback(result)
})
});
});
socket.on('torrent', function(hash, options, callback)
recive('torrent', function(hash, options, callback)
{
if(hash.length != 40)
return;
@ -283,7 +333,7 @@ io.on('connection', function(socket)
});
});
socket.on('searchTorrent', function(text, navigation, callback)
recive('searchTorrent', function(text, navigation, callback)
{
if(typeof callback != 'function')
return;
@ -309,11 +359,11 @@ io.on('connection', function(socket)
}
if(safeSearch)
{
where += " and contentcategory != 'xxx' ";
where += " and contentCategory != 'xxx' ";
}
if(navigation.type && navigation.type.length > 0)
{
where += ' and contenttype = ' + mysqlPool.escape(navigation.type) + ' ';
where += ' and contentType = ' + mysqlPool.escape(navigation.type) + ' ';
}
if(navigation.size)
{
@ -334,7 +384,7 @@ io.on('connection', function(socket)
let searchList = [];
//args.splice(orderBy && orderBy.length > 0 ? 1 : 0, 1);
//mysqlPool.query('SELECT * FROM `torrents` WHERE `name` like \'%' + text + '%\' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
sphinx.query('SELECT * FROM `torrents_index`,`torrents_index_delta` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
sphinx.query('SELECT * FROM `torrents` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
if(!rows) {
console.log(error)
callback(undefined)
@ -347,7 +397,7 @@ io.on('connection', function(socket)
});
});
socket.on('searchFiles', function(text, navigation, callback)
recive('searchFiles', function(text, navigation, callback)
{
if(typeof callback != 'function')
return;
@ -372,20 +422,21 @@ io.on('connection', function(socket)
args.splice(1, 0, orderBy);
order = 'ORDER BY ?? ' + orderDesc;
}
/*
if(safeSearch)
{
where += " and contentcategory != 'xxx' ";
where += " and contentCategory != 'xxx' ";
}
if(navigation.type && navigation.type.length > 0)
{
where += ' and contenttype = ' + mysqlPool.escape(navigation.type) + ' ';
where += ' and contentType = ' + mysqlPool.escape(navigation.type) + ' ';
}
if(navigation.size)
{
if(navigation.size.max > 0)
where += ' and size < ' + mysqlPool.escape(navigation.size.max) + ' ';
where += ' and torrentSize < ' + mysqlPool.escape(navigation.size.max) + ' ';
if(navigation.size.min > 0)
where += ' and size > ' + mysqlPool.escape(navigation.size.min) + ' ';
where += ' and torrentSize > ' + mysqlPool.escape(navigation.size.min) + ' ';
}
if(navigation.files)
{
@ -394,33 +445,48 @@ io.on('connection', function(socket)
if(navigation.files.min > 0)
where += ' and files > ' + mysqlPool.escape(navigation.files.min) + ' ';
}
*/
let search = {};
let searchList = [];
//args.splice(orderBy && orderBy.length > 0 ? 1 : 0, 1);
//mysqlPool.query('SELECT * FROM `files` inner join torrents on(torrents.hash = files.hash) WHERE files.path like \'%' + text + '%\' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
sphinx.query('SELECT * FROM `files_index`,`files_index_delta` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
if(!rows) {
sphinx.query('SELECT * FROM `files` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, files, fields) {
if(!files) {
console.log(error)
callback(undefined)
return;
}
rows.forEach((row) => {
if(!(row.hash in search))
if(files.length === 0)
{
callback(undefined)
return;
}
for(const file of files)
{
if(!search[file.hash])
{
let torrent = baseRowData(row);
search[row.hash] = torrent;
searchList.push(torrent)
search[file.hash] = { path: [] }
}
if(!search[row.hash].path)
search[row.hash].path = []
search[row.hash].path.push(row.path);
});
callback(searchList);
search[file.hash].path.push(file.path)
}
const inSql = Object.keys(search).map(hash => sphinx.escape(hash)).join(',');
sphinx.query(`SELECT * FROM torrents WHERE hash IN(${inSql})`, (err, torrents) => {
if(!torrents) {
console.log(err)
return;
}
for(const torrent of torrents)
{
search[torrent.hash] = Object.assign(torrent, search[torrent.hash])
}
callback(Object.values(search));
})
});
});
socket.on('checkTrackers', function(hash)
recive('checkTrackers', function(hash)
{
if(hash.length != 40)
return;
@ -428,7 +494,7 @@ io.on('connection', function(socket)
updateTorrentTrackers(hash);
});
socket.on('topTorrents', function(type, callback)
recive('topTorrents', function(type, callback)
{
let where = '';
let max = 20;
@ -439,19 +505,19 @@ io.on('connection', function(socket)
if(type == 'hours')
{
where = ' and `added` > DATE_SUB(NOW(), INTERVAL 24 HOUR) '
where = ' and `added` > ' + (Math.floor(Date.now() / 1000) - 60 * 60 * 24)
}
if(type == 'week')
{
where = ' and `added` > DATE_SUB(NOW(), INTERVAL 7 DAY) '
where = ' and `added` > ' + (Math.floor(Date.now() / 1000) - 60 * 60 * 24 * 7)
}
if(type == 'month')
{
where = ' and `added` > DATE_SUB(NOW(), INTERVAL 30 DAY) '
where = ' and `added` > ' + (Math.floor(Date.now() / 1000) - 60 * 60 * 24 * 30)
}
}
const query = `SELECT * FROM torrents WHERE seeders > 0 and (contentCategory is null or contentCategory != 'xxx') ${where} ORDER BY seeders + leechers DESC LIMIT ${max}`;
const query = `SELECT * FROM torrents WHERE seeders > 0 and contentCategory != 'xxx' ${where} ORDER BY seeders DESC LIMIT ${max}`;
if(topCache[query])
{
callback(topCache[query]);
@ -471,7 +537,7 @@ io.on('connection', function(socket)
});
});
socket.on('admin', function(callback)
recive('admin', function(callback)
{
if(typeof callback != 'function')
return;
@ -481,7 +547,7 @@ io.on('connection', function(socket)
})
});
socket.on('setAdmin', function(options, callback)
recive('setAdmin', function(options, callback)
{
if(typeof options !== 'object')
return;
@ -513,7 +579,7 @@ io.on('connection', function(socket)
return ip
};
socket.on('vote', function(hash, isGood, callback)
recive('vote', function(hash, isGood, callback)
{
if(hash.length != 40)
return;
@ -552,7 +618,7 @@ io.on('connection', function(socket)
} else {
bad++;
}
io.sockets.emit('vote', {
send('vote', {
hash, good, bad
});
callback(true)
@ -561,7 +627,8 @@ io.on('connection', function(socket)
});
});
});
});
//});
let undoneQueries = 0;
let pushDatabaseBalance = () => {
@ -583,6 +650,7 @@ let popDatabaseBalance = () => {
};
// statistics update
/*
setInterval(() => {
let stats = {};
mysqlPool.query('SELECT COUNT(*) as tornum FROM `torrents`', function (error, rows, fields) {
@ -590,7 +658,7 @@ setInterval(() => {
mysqlPool.query('SELECT COUNT(*) as filesnum, SUM(`size`) as filesizes FROM `files`', function (error, rows, fields) {
stats.files = rows[0].filesnum;
stats.size = rows[0].filesizes;
io.sockets.emit('newStatistic', stats);
send('newStatistic', stats);
mysqlPool.query('DELETE FROM `statistic`', function (err, result) {
if(!result) {
console.error(err);
@ -604,12 +672,14 @@ setInterval(() => {
});
});
}, 10 * 60 * 1000)
*/
const updateTorrentTrackers = (hash) => {
let maxSeeders = 0, maxLeechers = 0, maxCompleted = 0;
mysqlSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [new Date(), hash], function(err, result) {
mysqlSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [Math.floor(Date.now() / 1000), hash], function(err, result) {
if(!result) {
console.error(err);
return
}
udpTrackers.forEach((tracker) => {
@ -634,12 +704,13 @@ const updateTorrentTrackers = (hash) => {
maxCompleted = completed;
let checkTime = new Date();
mysqlSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? WHERE hash = ?', [seeders, completed, leechers, checkTime, hash], function(err, result) {
mysqlSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) {
if(!result) {
console.error(err);
return
}
io.sockets.emit('trackerTorrentUpdate', {
send('trackerTorrentUpdate', {
hash,
seeders,
completed,
@ -656,6 +727,7 @@ const cleanupTorrents = (cleanTorrents = 1) => {
if(!config.cleanup)
return;
/*
disk.check(rootPath, function(err, info) {
if (err) {
console.log(err);
@ -685,6 +757,7 @@ const cleanupTorrents = (cleanTorrents = 1) => {
cleanupDebug('enough free space', (free / (1024 * 1024)) + "mb");
}
});
*/
}
const updateTorrent = (metadata, infohash, rinfo) => {
@ -704,8 +777,10 @@ const updateTorrent = (metadata, infohash, rinfo) => {
let file = metadata.info.files[i];
let filePath = file.path.join('/');
let fileQ = {
id: filesId++,
hash: hash,
path: filePath,
pathIndex: filePath,
size: file.length,
};
filesArray.push(fileQ);
@ -715,8 +790,10 @@ const updateTorrent = (metadata, infohash, rinfo) => {
else
{
let fileQ = {
id: filesId++,
hash: hash,
path: metadata.info.name,
pathIndex: metadata.info.name,
size: size,
};
filesArray.push(fileQ);
@ -724,18 +801,27 @@ const updateTorrent = (metadata, infohash, rinfo) => {
let filesToAdd = filesArray.length;
mysqlSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [hash], function(err, rows) {
if(!rows)
return
const db_files = rows[0]['files_count'];
if(db_files !== filesCount)
{
mysqlSingle.query('DELETE FROM files WHERE hash = ?', hash, function (err, result) {
if(err)
{
return;
}
filesArray.forEach((file) => {
mysqlSingle.query('INSERT INTO files SET ?', file, function(err, result) {
mysqlSingle.insertValues('files', file, function(err, result) {
if(!result) {
console.log(file);
console.error(err);
return
}
if(--filesToAdd === 0) {
io.sockets.emit('filesReady', hash);
send('filesReady', hash);
}
});
});
@ -744,36 +830,52 @@ const updateTorrent = (metadata, infohash, rinfo) => {
})
var torrentQ = {
id: torrentsId++,
hash: hash,
name: metadata.info.name,
nameIndex: metadata.info.name,
size: size,
files: filesCount,
piecelength: metadata.info['piece length'],
ipv4: rinfo.address,
port: rinfo.port
port: rinfo.port,
added: Math.floor(Date.now() / 1000)
};
torrentTypeDetect(torrentQ, filesArray);
var query = mysqlSingle.query('INSERT INTO torrents SET ? ON DUPLICATE KEY UPDATE hash=hash', torrentQ, function(err, result) {
if(result) {
io.sockets.emit('newTorrent', {
hash: hash,
name: metadata.info.name,
size: size,
files: filesCount,
piecelength: metadata.info['piece length'],
contentType: torrentQ.contentType,
contentCategory: torrentQ.contentCategory,
});
updateTorrentTrackers(hash);
}
else
{
console.log(torrentQ);
console.error(err);
}
});
mysqlSingle.query("SELECT id FROM torrents WHERE hash = ?", hash, (err, single) => {
if(!single)
{
console.log(err)
return
}
if(single.length > 0)
{
return
}
mysqlSingle.insertValues('torrents', torrentQ, function(err, result) {
if(result) {
send('newTorrent', {
hash: hash,
name: metadata.info.name,
size: size,
files: filesCount,
piecelength: metadata.info['piece length'],
contentType: torrentQ.contentType,
contentCategory: torrentQ.contentCategory,
});
updateTorrentTrackers(hash);
}
else
{
console.log(torrentQ);
console.error(err);
}
});
})
}
client.on('complete', function (metadata, infohash, rinfo) {
@ -819,7 +921,7 @@ function showFakeTorrentsPage(page)
torrents.forEach((torrent, index) => {
const fk = fakeTorrents.push(setTimeout(() => {
delete fakeTorrents[fk-1];
io.sockets.emit('newTorrent', baseRowData(torrent));
send('newTorrent', baseRowData(torrent));
updateTorrentTrackers(torrent.hash);
fakeTorrentsDebug('fake torrent', torrents.name, 'index, page:', index, page);
}, 700 * index))
@ -863,4 +965,15 @@ if(config.cleanup && config.indexer)
if(config.spaceQuota)
{
quotaDebug('disk quota enabled');
}
this.stop = (callback) => {
console.log('closing spider')
mysqlPool.end(() => spider.close(() => {
mysqlSingle.destroy()
callback()
}))
}
return this
}