diff --git a/.travis.yml b/.travis.yml
index 09466701..48416eec 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,8 @@
 language: php
 services: mysql
 php:
-  - "5.4"
   - "5.6"
+  - "7.0"
 env:
   - DB=mysql
 before_script:
diff --git a/app/Tdt/Core/ContentNegotiator.php b/app/Tdt/Core/ContentNegotiator.php
index 122fdac7..bdeb9324 100644
--- a/app/Tdt/Core/ContentNegotiator.php
+++ b/app/Tdt/Core/ContentNegotiator.php
@@ -57,13 +57,13 @@ public static function getResponse($data, $extension = null)
         $formatter_class = 'Tdt\\Core\\Formatters\\' . $extension . 'Formatter';
 
         // Exception for XML, only XML or HTML is allowed
-        if ($data->definition['source_type'] == 'XmlDefinition' && !empty($extension) && $data->source_definition['geo_formatted'] != 1) {
-            if ($extension != "XML" && $extension != "HTML" && $extension != "PHP") {
-                \App::abort(406, "XML only allows to be formatted in XML, HTML or PHP serialization.");
-            } elseif ($extension == "XML") {
-                return self::createXmlResponse($data->data);
-            }
-        }
+//        if ($data->definition['source_type'] == 'XmlDefinition' && !empty($extension) && $data->source_definition['geo_formatted'] != 1) {
+//            if ($extension != "XML" && $extension != "HTML" && $extension != "PHP") {
+//                \App::abort(406, "XML only allows to be formatted in XML, HTML or PHP serialization.");
+//            } elseif ($extension == "XML") {
+//                return self::createXmlResponse($data->data);
+//            }
+//        }
 
         if (empty($extension)) {
             $negotiator = new FormatNegotiator();
diff --git a/app/Tdt/Core/DataControllers/ELASTICSEARCHController.php b/app/Tdt/Core/DataControllers/ELASTICSEARCHController.php
index 3add24c7..675ad61c 100644
--- a/app/Tdt/Core/DataControllers/ELASTICSEARCHController.php
+++ b/app/Tdt/Core/DataControllers/ELASTICSEARCHController.php
@@ -5,11 +5,8 @@
 use Tdt\Core\Datasets\Data;
 use Tdt\Core\Pager;
 use Elastica\Client;
-use Elastica\Document;
-use Elastica\Query\Term;
 use Elastica\Search;
 use Elastica\Query;
-use Elastica\Exception\ResponseException;
 use Elastica\Query\SimpleQueryString;
 use Elastica\Query\MatchAll;
 
@@ -82,9 +79,9 @@ public function readData($source_definition, $rest_parameters = [])
     public static function getParameters()
     {
         $query_params = [
-            "query" => [
-                "required" => false,
-                "description" => "A value that will be used to perform a full text-search on the data."
+            'query' => [
+                'required' => false,
+                'description' => 'A value that will be used to perform a full text-search on the data.'
             ]
         ];
 
diff --git a/app/Tdt/Core/DataControllers/JSONController.php b/app/Tdt/Core/DataControllers/JSONController.php
index 8c233f77..65a4e38f 100644
--- a/app/Tdt/Core/DataControllers/JSONController.php
+++ b/app/Tdt/Core/DataControllers/JSONController.php
@@ -4,11 +4,12 @@
 
 use Tdt\Core\Cache\Cache;
 use Tdt\Core\Datasets\Data;
-use Symfony\Component\HttpFoundation\Request;
 use ML\JsonLD\JsonLD;
 use ML\JsonLD\NQuads;
 use EasyRdf\Graph;
 
+ini_set('default_socket_timeout', 5);
+
 /**
  * JSON Controller
  *
@@ -93,15 +94,22 @@ private function getPlainJson($uri)
             return Cache::get($uri);
         }
 
-        if (!filter_var($uri, FILTER_VALIDATE_URL) === false) {
+        $config = stream_context_create(array(
+            'http' => array(
+                'method' => 'GET',
+                'timeout' => 35,
+            )
+        ));
+
+        if (! 
filter_var($uri, FILTER_VALIDATE_URL) === false) {
             $parts = parse_url($uri);
             if ($parts['scheme'] != 'file') {
                 $data = $this->getRemoteData($uri);
             } else {
-                $data =@ file_get_contents($uri);
+                $data = @ file_get_contents($uri, 0, $config);
             }
         } else {
-            $data =@ file_get_contents($uri);
+            $data = @ file_get_contents($uri, 0, $config);
         }
 
         if ($data) {
@@ -166,7 +174,7 @@ private function getRemoteData($url)
         curl_setopt($c, CURLOPT_SSL_VERIFYHOST, false);
         curl_setopt($c, CURLOPT_SSL_VERIFYPEER, false);
         curl_setopt($c, CURLOPT_MAXREDIRS, 10);
-        $follow_allowed= ( ini_get('open_basedir') || ini_get('safe_mode')) ? false:true;
+        $follow_allowed = ( ini_get('open_basedir') || ini_get('safe_mode')) ? false : true;
 
         if ($follow_allowed) {
             curl_setopt($c, CURLOPT_FOLLOWLOCATION, 1);
@@ -174,7 +182,7 @@
 
         curl_setopt($c, CURLOPT_CONNECTTIMEOUT, 9);
         curl_setopt($c, CURLOPT_REFERER, $url);
-        curl_setopt($c, CURLOPT_TIMEOUT, 60);
+        curl_setopt($c, CURLOPT_TIMEOUT, 35);
         curl_setopt($c, CURLOPT_AUTOREFERER, true);
         curl_setopt($c, CURLOPT_ENCODING, 'gzip,deflate');
         $data = curl_exec($c);
@@ -189,7 +197,9 @@
         if ($status['http_code'] == 200) {
             return $data;
         } elseif ($status['http_code'] == 301 || $status['http_code'] == 302) {
-            \App::abort(400, "The JSON URL redirected us to a different URI.");
+            \App::abort(400, 'The JSON URL redirected us to a different URI.');
+        } elseif ($status['http_code'] > 300) {
+            \App::abort(400, 'The JSON source is not available at this moment.');
         }
 
         return $data;
@@ -197,7 +207,7 @@
 
     protected function remapBnode($name, $graph)
     {
-        if (!isset($this->bnodeMap[$name])) {
+        if (! isset($this->bnodeMap[$name])) {
             $this->bnodeMap[$name] = $graph->newBNodeId();
         }
         return $this->bnodeMap[$name];
diff --git a/app/Tdt/Core/DataControllers/MYSQLController.php b/app/Tdt/Core/DataControllers/MYSQLController.php
index 202f2f7f..25bba8fe 100644
--- a/app/Tdt/Core/DataControllers/MYSQLController.php
+++ b/app/Tdt/Core/DataControllers/MYSQLController.php
@@ -37,7 +37,7 @@ public function readData($source_definition, $rest_parameters = array())
         }
 
         // Disregard the paging when rest parameters are given
-        if (!empty($rest_parameters)) {
+        if (! empty($rest_parameters)) {
             $limit = 500;
             $offset = 0;
         }
@@ -54,10 +54,10 @@ public function readData($source_definition, $rest_parameters = array())
             $geo[$geo_prop['property']] = $geo_prop['path'];
         }
 
-        if (!$columns) {
+        if (! $columns) {
             // 500 error because this shouldn't happen in normal conditions
             // Columns are parsed upon adding a CSV resource and are always present
-            \App::abort(500, "Cannot find the columns of the MySQL table file, this might be due to a corrupted database or a broken configuration.");
+            \App::abort(500, 'Cannot find the columns of the MySQL table file, this might be due to a corrupted database or a broken configuration.');
         }
 
         // Create aliases for the columns
@@ -65,21 +65,21 @@ public function readData($source_definition, $rest_parameters = array())
         $pk = null;
 
         foreach ($columns as $column) {
-
-            if (!empty($column['is_pk'])) {
+            if (! 
empty($column['is_pk'])) { $pk = $column['column_name_alias']; } } // Connect to the database $db_config = array( - 'driver' => 'mysql', - 'host' => $source_definition['host'], - 'database' => $source_definition['database'], - 'username' => $source_definition['username'], - 'password' => $source_definition['password'], 'charset' => 'utf8', 'collation' => $source_definition['collation'], + 'database' => $source_definition['database'], + 'driver' => 'mysql', + 'host' => $source_definition['mysql_host'], + 'password' => $source_definition['mysql_password'], + 'port' => $source_definition['mysql_port'], + 'username' => $source_definition['mysql_username'], ); // Configure a connection @@ -92,7 +92,7 @@ public function readData($source_definition, $rest_parameters = array()) $query = $source_definition['query']; // Get the total amount of records for the query for pagination - preg_match("/select.*?(from.*)/msi", $query, $matches); + preg_match('/select.*?(from.*)/msi', $query, $matches); if (empty($matches[1])) { \App::abort(400, 'Failed to make a count statement, make sure the SQL query is valid.'); @@ -104,20 +104,19 @@ public function readData($source_definition, $rest_parameters = array()) $total_rows = $count_result[0]->count; - if (!$limitInQuery) { - if (!empty($limit)) { + if (! $limitInQuery) { + if (! empty($limit)) { $query .= ' limit ' . $limit; } - if (!empty($offset)) { + if (! empty($offset)) { $query .= ' offset ' . $offset; } } $result = $db->select($query); - } catch (QueryException $ex) { - \App::abort(400, "A bad query has been made, make sure all passed statements are SQL friendly. The error message was: " . $ex->getMessage()); + \App::abort(400, 'A bad query has been made, make sure all passed statements are SQL friendly. The error message was: ' . $ex->getMessage()); } // Get the paging headers diff --git a/app/Tdt/Core/DataControllers/SHPController.php b/app/Tdt/Core/DataControllers/SHPController.php index 8f34deb1..c8019c90 100644 --- a/app/Tdt/Core/DataControllers/SHPController.php +++ b/app/Tdt/Core/DataControllers/SHPController.php @@ -27,15 +27,15 @@ class SHPController extends ADataController private $geo_property; private static $RECORD_TYPES = [ - 0 => "Null Shape", - 1 => "Point", - 3 => "PolyLine", - 5 => "Polygon", - 8 => "MultiPoint", - 11 => "PointZ", - 13 => "PolyLineZ", - 15 => "PolygonZ", - 18 => "MultiPointZ" + 0 => 'Null Shape', + 1 => 'Point', + 3 => 'PolyLine', + 5 => 'Polygon', + 8 => 'MultiPoint', + 11 => 'PointZ', + 13 => 'PolyLineZ', + 15 => 'PolygonZ', + 18 => 'MultiPointZ' ]; public function __construct( @@ -57,7 +57,7 @@ public function readData($source_definition, $rest_parameters = array()) list($limit, $offset) = Pager::calculateLimitAndOffset(); // Disregard the paging when rest parameters are given - if (!empty($rest_parameters)) { + if (! 
empty($rest_parameters)) { $limit = PHP_INT_MAX; $offset = 0; } @@ -74,7 +74,7 @@ public function readData($source_definition, $rest_parameters = array()) if (empty($tmp_path)) { // If this occurs then the server is not configured correctly, thus a 500 error is thrown - \App::abort(500, "The temp directory, retrieved by the operating system, could not be retrieved."); + \App::abort(500, 'The temp directory, retrieved by the operating system, could not be retrieved.'); } // Get the columns @@ -89,8 +89,8 @@ public function readData($source_definition, $rest_parameters = array()) $geo[$geo_prop['property']] = $geo_prop['path']; } - if (!$columns) { - \App::abort(500, "Cannot find the columns of the SHP definition."); + if (! $columns) { + \App::abort(500, 'Cannot find the columns of the SHP definition.'); } try { @@ -98,19 +98,19 @@ public function readData($source_definition, $rest_parameters = array()) $arrayOfRowObjects = array(); // Prepare the options to read the SHP file - $is_url = (substr($uri, 0, 4) == "http"); + $is_url = (substr($uri, 0, 4) == 'http'); // If the shape files are located on an HTTP address, fetch them and store them locally if ($is_url) { $tmp_file_name = uniqid(); - $tmp_file = $tmp_path . "/" . $tmp_file_name; + $tmp_file = $tmp_path . '/' . $tmp_file_name; - file_put_contents($tmp_file . ".shp", file_get_contents(substr($uri, 0, strlen($uri) - 4) . ".shp")); - file_put_contents($tmp_file . ".dbf", file_get_contents(substr($uri, 0, strlen($uri) - 4) . ".dbf")); - file_put_contents($tmp_file . ".shx", file_get_contents(substr($uri, 0, strlen($uri) - 4) . ".shx")); + file_put_contents($tmp_file . '.shp', file_get_contents(substr($uri, 0, strlen($uri) - 4) . '.shp')); + file_put_contents($tmp_file . '.dbf', file_get_contents(substr($uri, 0, strlen($uri) - 4) . '.dbf')); + file_put_contents($tmp_file . '.shx', file_get_contents(substr($uri, 0, strlen($uri) - 4) . '.shx')); // Along this file the class will use file.shx and file.dbf - $shape_file = new ShapeFile($tmp_file . ".shp"); + $shape_file = new ShapeFile($tmp_file . '.shp'); } else { $shape_file = new ShapeFile($uri); // along this file the class will use file.shx and file.dbf } @@ -146,7 +146,7 @@ public function readData($source_definition, $rest_parameters = array()) $projCode = $projection['projection']; if (empty($projCode)) { - \App::abort(400, "Could not find a supported EPSG code."); + \App::abort(400, 'Could not find a supported EPSG code.'); } $this->proj4 = new Proj4php(); @@ -219,8 +219,8 @@ private function parsePoint($shp_data) $x = $shp_data['x']; $y = $shp_data['y']; - if (!empty($x) && !empty($y)) { - if (!empty($this->epsg) && $this->epsg != 4326) { + if (! empty($x) && ! empty($y)) { + if (! empty($this->epsg) && $this->epsg != 4326) { $pointSrc = new Point($x, $y); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -242,8 +242,8 @@ private function parsePointZ($shp_data) $y = $shp_data['y']; $z = $shp_data['z']; - if (!empty($x) && !empty($y) && !empty($z)) { - if (!empty($this->epsg) && $this->epsg != 4326) { + if (! empty($x) && ! empty($y) && ! empty($z)) { + if (! empty($this->epsg) && $this->epsg != 4326) { $pointSrc = new Point($x, $y, $z); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -272,7 +272,7 @@ private function parsePolyline($shp_data) $y = $point['y']; // Translate the coordinates to WSG84 geo coordinates - if (!empty($this->epsg)) { + if (! 
empty($this->epsg)) { $pointSrc = new Point($x, $y); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -282,7 +282,7 @@ private function parsePolyline($shp_data) $points[] = $x . ',' . $y; } - array_push($parts, implode(" ", $points)); + array_push($parts, implode(' ', $points)); } return implode(';', $parts); @@ -301,7 +301,7 @@ private function parsePolylineZ($shp_data) $z = $point['z']; // Translate the coordinates to WSG84 geo coordinates - if (!empty($this->epsg)) { + if (! empty($this->epsg)) { $pointSrc = new Point($x, $y, $z); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -311,7 +311,7 @@ private function parsePolylineZ($shp_data) $points[] = $x . ',' . $y . ',' . $z; } - array_push($parts, implode(" ", $points)); + array_push($parts, implode(' ', $points)); } return implode(';', $parts); @@ -332,7 +332,7 @@ private function parsePolygon($shp_data) $y = $point['y']; // Translate the coordinates to WSG84 geo coordinates - if (!empty($this->epsg)) { + if (! empty($this->epsg)) { $pointSrc = new Point($x, $y); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -343,7 +343,7 @@ private function parsePolygon($shp_data) $points[] = $x . ',' . $y; } - array_push($parts, implode(" ", $points)); + array_push($parts, implode(' ', $points)); } return $parts = implode(';', $parts); @@ -365,7 +365,7 @@ private function parsePolygonZ($shp_data) $z = $point['z']; // Translate the coordinates to WSG84 geo coordinates - if (!empty($this->epsg)) { + if (! empty($this->epsg)) { $pointSrc = new Point($x, $y, $z); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -376,7 +376,7 @@ private function parsePolygonZ($shp_data) $points[] = $x . ',' . $y . ',' . $z; } - array_push($parts, implode(" ", $points)); + array_push($parts, implode(' ', $points)); } return $parts = implode(';', $parts); @@ -388,8 +388,8 @@ private function parseMultipoint($shp_data) $x = $point['x']; $y = $point['y']; - if (!empty($x) && !empty($y)) { - if (!empty($this->epsg)) { + if (! empty($x) && ! empty($y)) { + if (! empty($this->epsg)) { $pointSrc = new Point($x, $y); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -412,8 +412,8 @@ private function parseMultipointZ($shp_data) $y = $point['y']; $z = $point['z']; - if (!empty($x) && !empty($y) && !empty($z)) { - if (!empty($this->epsg)) { + if (! empty($x) && ! empty($y) && ! empty($z)) { + if (! empty($this->epsg)) { $pointSrc = new Point($x, $y, $z); $pointDest = $this->proj4->transform($this->projSrc, $this->projDest, $pointSrc); @@ -435,7 +435,7 @@ private function parseMultipointZ($shp_data) */ public static function parseColumns($options) { - $is_url = (substr($options['uri'], 0, 4) == "http"); + $is_url = (substr($options['uri'], 0, 4) == 'http'); $tmp_dir = sys_get_temp_dir(); $columns = array(); @@ -446,14 +446,14 @@ public static function parseColumns($options) // This remains untested $tmp_file = uniqid(); - file_put_contents($tmp_dir . '/' . $tmp_file . ".shp", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".shp")); - file_put_contents($tmp_dir . '/' . $tmp_file . ".dbf", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".dbf")); - file_put_contents($tmp_dir . '/' . $tmp_file . ".shx", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".shx")); + file_put_contents($tmp_dir . '/' . $tmp_file . 
'.shp', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.shp')); + file_put_contents($tmp_dir . '/' . $tmp_file . '.dbf', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.dbf')); + file_put_contents($tmp_dir . '/' . $tmp_file . '.shx', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.shx')); // Along this file the class will use file.shx and file.dbf - $shape_file = new Shapefile($tmp_dir . '/' . $tmp_file . ".shp", array('noparts' => false)); + $shape_file = new Shapefile($tmp_dir . '/' . $tmp_file . '.shp', array('noparts' => false)); } else { - // along this file the class will use file.shx and file.dbf + // along this file the class will use file.shx and file.dbf $shape_file = new Shapefile($options['uri'], array('noparts' => false)); } } catch (Exception $e) { @@ -463,7 +463,7 @@ public static function parseColumns($options) $record = $shape_file->getRecord(); // Read meta data - if (!$record) { + if (! $record) { $uri = $options['uri']; \App::abort(400, "We failed to retrieve a record from the provided shape file on uri $uri, make sure the corresponding dbf and shx files are at the same location."); } @@ -517,7 +517,6 @@ public static function parseColumns($options) return $columns; } - /** * Parse the geo column names out of a SHP file. */ @@ -530,18 +529,18 @@ public static function parseGeoProperty($options, $columns) $aliases[$column['column_name']] = $column['column_name_alias']; } - $is_url = (substr($options['uri'], 0, 4) == "http"); + $is_url = (substr($options['uri'], 0, 4) == 'http'); $tmp_dir = sys_get_temp_dir(); $geo_properties = array(); if ($is_url) { // This remains untested $tmp_file = uniqid(); - file_put_contents($tmp_dir . '/' . $tmp_file . ".shp", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".shp")); - file_put_contents($tmp_dir . '/' . $tmp_file . ".dbf", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".dbf")); - file_put_contents($tmp_dir . '/' . $tmp_file . ".shx", file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . ".shx")); + file_put_contents($tmp_dir . '/' . $tmp_file . '.shp', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.shp')); + file_put_contents($tmp_dir . '/' . $tmp_file . '.dbf', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.dbf')); + file_put_contents($tmp_dir . '/' . $tmp_file . '.shx', file_get_contents(substr($options['uri'], 0, strlen($options['uri']) - 4) . '.shx')); - $shape_file = new ShapeFile($tmp_dir . '/' . $tmp_file . ".shp", array('noparts' => false)); + $shape_file = new ShapeFile($tmp_dir . '/' . $tmp_file . '.shp', array('noparts' => false)); } else { $shape_file = new ShapeFile($options['uri'], array('noparts' => false)); } @@ -549,7 +548,7 @@ public static function parseGeoProperty($options, $columns) $record = $shape_file->getRecord(); // read meta data - if (!$record) { + if (! 
$record) { $uri = $options['uri']; \App::abort(400, "We failed to retrieve a record from the provided shape file on uri $uri, make sure the corresponding dbf and shx files are at the same location."); } diff --git a/app/Tdt/Core/DataControllers/XLSController.php b/app/Tdt/Core/DataControllers/XLSController.php index 1ea255c7..d303a22b 100644 --- a/app/Tdt/Core/DataControllers/XLSController.php +++ b/app/Tdt/Core/DataControllers/XLSController.php @@ -40,7 +40,7 @@ public function readData($source_definition, $rest_parameters = array()) list($limit, $offset) = Pager::calculateLimitAndOffset(); // Disregard the paging when rest parameters are given - if (!empty($rest_parameters)) { + if (! empty($rest_parameters)) { $limit = PHP_INT_MAX; $offset = 0; } @@ -85,7 +85,7 @@ public function readData($source_definition, $rest_parameters = array()) $columns = $parsed_columns; if (empty($columns)) { - \App::abort(500, "Cannot find the columns from the XLS definition."); + \App::abort(500, 'Cannot find the columns from the XLS definition.'); } // Create aliases for the columns @@ -93,7 +93,7 @@ public function readData($source_definition, $rest_parameters = array()) $pk = null; foreach ($columns as $column) { - if (!empty($column['is_pk'])) { + if (! empty($column['is_pk'])) { $pk = $column['column_name_alias']; } } @@ -105,14 +105,14 @@ public function readData($source_definition, $rest_parameters = array()) $tmp_path = sys_get_temp_dir(); if (empty($tmp_path)) { - \App::abort(500, "The temp directory, retrieved by the operating system, could not be retrieved."); + \App::abort(500, 'The temp directory, retrieved by the operating system, could not be retrieved.'); } try { - if (substr($uri, 0, 4) == "http") { + if (substr($uri, 0, 4) == 'http') { $tmpFile = uniqid(); - file_put_contents($tmp_path . "/" . $tmpFile, file_get_contents($uri)); - $php_obj = self::loadExcel($tmp_path . "/" . $tmpFile, $this->getFileExtension($uri), $sheet); + file_put_contents($tmp_path . '/' . $tmpFile, file_get_contents($uri)); + $php_obj = self::loadExcel($tmp_path . '/' . $tmpFile, $this->getFileExtension($uri), $sheet); } else { $php_obj = self::loadExcel($uri, $this->getFileExtension($uri), $sheet); @@ -166,7 +166,7 @@ public function readData($source_definition, $rest_parameters = array()) } else { if (empty($row_objects[$rowobject->$pk])) { $row_objects[$rowobject->$pk] = $rowobject; - } elseif (!empty($row_objects[$rowobject->$pk])) { + } elseif (! empty($row_objects[$rowobject->$pk])) { $double = $rowobject->$pk; \Log::info("The primary key $double has been used already for another record!"); } else { @@ -188,7 +188,6 @@ public function readData($source_definition, $rest_parameters = array()) $data_result->preferred_formats = $this->getPreferredFormats(); return $data_result; - } catch (Exception $ex) { App::abort(500, "Failed to retrieve data from the XLS file on location $uri."); } @@ -204,22 +203,24 @@ public static function getFileExtension($file) /** * Create an Excel PHP Reader object from the Excel sheet. 
+ * + * @return Reader */ public static function loadExcel($file, $type, $sheet) { - - if ($type == "xls") { + if ($type == 'xls') { $reader = IOFactory::createReader('Excel5'); - } elseif ($type == "xlsx") { + } elseif ($type == 'xlsx') { $reader = IOFactory::createReader('Excel2007'); } else { - \App::abort(500, "The given file is not supported, supported file are xls or xlsx files."); + \App::abort(500, 'The given file is not supported, supported file are xls or xlsx files.'); } $reader->setReadDataOnly(true); + $file = str_replace('file://', '', $file); $sheet_info = $reader->listWorkSheetinfo($file); - if (empty($sheet) && !empty($sheet_info)) { + if (empty($sheet) && ! empty($sheet_info)) { $first_sheet_info = $sheet_info[0]; $sheet = $first_sheet_info['worksheetName']; @@ -236,7 +237,7 @@ public static function loadExcel($file, $type, $sheet) if (empty(self::$sheet)) { if (empty($sheet)) { - \App::abort(404, "No sheets were found in the XLS file."); + \App::abort(404, 'No sheets were found in the XLS file.'); } else { \App::abort(404, "The sheet provided ($sheet) has not been found."); } @@ -258,7 +259,7 @@ private function createValues($columns, $data) foreach ($columns as $column) { $value = @$data[$column['index']]; - if (!is_null($value)) { + if (! is_null($value)) { $result[$column['column_name_alias']] = $data[$column['index']]; } else { $index = $column['index']; @@ -280,7 +281,7 @@ public static function parseColumns($input) $aliases = array(); - if (!empty($columns_info)) { + if (! empty($columns_info)) { foreach ($columns_info as $column_info) { $aliases[$column_info['index']] = $column_info['column_name_alias']; } @@ -290,18 +291,18 @@ public static function parseColumns($input) $tmp_dir = sys_get_temp_dir(); if (empty($columns)) { - if (!is_dir($tmp_dir)) { + if (! is_dir($tmp_dir)) { mkdir($tmp_dir); } - $is_uri = (substr($input['uri'], 0, 4) == "http"); + $is_uri = (substr($input['uri'], 0, 4) == 'http'); try { if ($is_uri) { $tmp_file = uniqid(); - file_put_contents($tmp_dir. "/" . $tmp_file, file_get_contents($input['uri'])); - $php_obj = self::loadExcel($tmp_dir ."/" . $tmp_file, self::getFileExtension($input['uri']), $input['sheet']); + file_put_contents($tmp_dir . '/' . $tmp_file, file_get_contents($input['uri'])); + $php_obj = self::loadExcel($tmp_dir . '/' . $tmp_file, self::getFileExtension($input['uri']), $input['sheet']); } else { $php_obj = self::loadExcel($input['uri'], self::getFileExtension($input['uri']), $input['sheet']); } @@ -313,7 +314,6 @@ public static function parseColumns($input) \App::abort(404, "Something went wrong whilst retrieving the Excel file from uri $uri."); } - if (is_null($worksheet)) { \App::abort(404, "The sheet with name, self::$sheet, has not been found in the Excel file."); } @@ -331,7 +331,7 @@ public static function parseColumns($input) foreach ($cell_iterator as $cell) { $column_name = ''; - if ($cell->getValue() != "") { + if ($cell->getValue() != '') { $column_name = trim($cell->getCalculatedValue()); } else { @@ -371,7 +371,7 @@ public static function parseColumns($input) $php_obj->disconnectWorksheets(); if ($is_uri) { - unlink($tmp_dir . "/" . $tmp_file); + unlink($tmp_dir . '/' . 
$tmp_file); } } diff --git a/app/Tdt/Core/DataControllers/XMLController.php b/app/Tdt/Core/DataControllers/XMLController.php index cdf2946b..3a4a8ac3 100644 --- a/app/Tdt/Core/DataControllers/XMLController.php +++ b/app/Tdt/Core/DataControllers/XMLController.php @@ -4,8 +4,8 @@ use Tdt\Core\Cache\Cache; use Tdt\Core\Datasets\Data; -use Symfony\Component\HttpFoundation\Request; -use Tdt\Core\utils\XMLSerializer; + +ini_set('default_socket_timeout', 5); /** * XML Controller @@ -33,10 +33,17 @@ public function readData($source_definition, $rest_parameters = array()) if (Cache::has($uri)) { $data = Cache::get($uri); } else { + $config = stream_context_create(array( + 'http' => array( + 'method' => 'GET', + 'timeout' => 2, + ) + )); + // Fetch the data - $data =@ file_get_contents($uri); + $data = @ file_get_contents($uri, 0, $config); - if (!empty($data)) { + if (! empty($data)) { Cache::put($uri, $data, $source_definition['cache']); } else { $uri = $source_definition['uri']; @@ -49,7 +56,7 @@ public function readData($source_definition, $rest_parameters = array()) $data_result->semantic = $this->prefixes; $data_result->preferred_formats = $this->getPreferredFormats(); - if (!empty($source_definition['geo_formatted']) && $source_definition['geo_formatted']) { + if (! empty($source_definition['geo_formatted']) && $source_definition['geo_formatted']) { $data_result->geo_formatted = true; $data_result->preferred_formats = array('geojson', 'map', 'php'); } diff --git a/app/Tdt/Core/Datasets/DatasetController.php b/app/Tdt/Core/Datasets/DatasetController.php index 41fd1208..55cc46ec 100755 --- a/app/Tdt/Core/Datasets/DatasetController.php +++ b/app/Tdt/Core/Datasets/DatasetController.php @@ -9,7 +9,6 @@ use Tdt\Core\ApiController; use Tdt\Core\Formatters\FormatHelper; use EasyRdf\RdfNamespace; -use Log; /** * DatasetController @@ -29,7 +28,7 @@ class DatasetController extends ApiController * @return \Response */ public function get($uri) - { + { // Check permissions Auth::requirePermissions('dataset.view'); @@ -56,14 +55,13 @@ public function get($uri) $cache_string .= http_build_query($query_string_params); $cache_string = sha1($cache_string); - + if (Cache::has($cache_string)) { return ContentNegotiator::getResponse(Cache::get($cache_string), $extension); } else { // Get definition $definition = $this->definition->getByIdentifier($uri); - if ($definition) { // Get source definition $source_definition = $this->definition->getDefinitionSource( @@ -71,19 +69,24 @@ public function get($uri) $definition['source_type'] ); - //when requesting data, the formatter should notice the linked job, - // and treat it as an elasticsearch data type. - - if ($definition['job_id'] != null) { + if($definition['xslt_file']) { + $source_definition['xslt_file'] = $definition['xslt_file']; + } - $source_definition['type'] = 'ELASTICSEARCH'; - $source_definition['host'] = "http://tdt.dev/"; - $source_definition['port'] = "9200"; - $source_definition['username'] = ''; - $source_definition['password'] = ''; - $source_definition['es_type'] = $definition['collection_uri'].'_'.$definition['resource_name']; - $source_definition['es_index'] = "datatank"; - } + // when requesting data, the formatter should notice the linked job, + // and treat it as an elasticsearch data type. + if (! 
is_null(@$definition['job_id'])) { + // Get the job from the definition + $job = \Job::find($definition['job_id']); + + $source_definition['type'] = 'ELASTICSEARCH'; + $source_definition['host'] = $job->loader->host; + $source_definition['port'] = $job->loader->port; + $source_definition['username'] = empty($job->loader->username) ? '' : $job->loader->username; + $source_definition['password'] = empty($job->loader->password) ? '' : $job->loader->password; + $source_definition['es_type'] = $job->loader->es_type; + $source_definition['es_index'] = $job->loader->es_index; + } if ($source_definition) { $source_type = $source_definition['type']; @@ -115,18 +118,18 @@ public function get($uri) // Retrieve dataobject from datacontroller $data = $data_controller->readData($source_definition, $rest_parameters); - // If the source type is XML, just return the XML contents, don't transform - if (strtolower($source_type) == 'xml' && $extension == 'xml') { - return $this->createXMLResponse($data->data); - } elseif (strtolower($source_type) == 'xml' && $extension == 'kml' && $data->geo_formatted) { - return $this->createXMLResponse($data->data); - } elseif (! $data->is_semantic && $extension == 'xml' && $source_type != 'xml') { - \App::abort(406, 'The requested format for the datasource is not available.'); - } elseif (strtolower($source_type) == 'xml' && ! $data->geo_formatted && ! empty($extension) && $extension != 'xml') { - \App::abort(406, 'The requested format for the datasource is not available.'); - } elseif (strtolower($source_type) == 'xml' && $data->geo_formatted && ! empty($extension) && ! in_array($extension, $data->preferred_formats)) { - \App::abort(406, 'The requested format for the datasource is not available.'); - } +// // If the source type is XML, just return the XML contents, don't transform +// if (strtolower($source_type) == 'xml' && $extension == 'xml') { +// return $this->createXMLResponse($data->data); +// } elseif (strtolower($source_type) == 'xml' && $extension == 'kml' && $data->geo_formatted) { +// return $this->createXMLResponse($data->data); +// } elseif (! $data->is_semantic && $extension == 'xml' && $source_type != 'xml') { +// \App::abort(406, 'The requested format for the datasource is not available.'); +// } elseif (strtolower($source_type) == 'xml' && ! $data->geo_formatted && ! empty($extension) && $extension != 'xml') { +// \App::abort(406, 'The requested format for the datasource is not available.'); +// } elseif (strtolower($source_type) == 'xml' && $data->geo_formatted && ! empty($extension) && ! in_array($extension, $data->preferred_formats)) { +// \App::abort(406, 'The requested format for the datasource is not available.'); +// } $data->rest_parameters = $rest_parameters; @@ -184,7 +187,7 @@ public function get($uri) // Add source definition to the object $data->source_definition = $source_definition; - + // Add dataset updates information to the object $data->updates_info = \DB::table('definitions_updates') ->where('definition_id', $definition['id']) @@ -196,7 +199,7 @@ public function get($uri) // Add the available, supported formats to the object $format_helper = new FormatHelper(); $data->formats = $format_helper->getAvailableFormats($data); - + // Store in cache if (! 
empty($definition['cache_minutes'])) { Cache::put($cache_string, $data, $definition['cache_minutes']); @@ -242,7 +245,7 @@ public function get($uri) // Return the formatted response with content negotiation return ContentNegotiator::getResponse($data, $extension); } else { - \App::abort(404, "The dataset or collection you were looking for could not be found (URI: $uri)."); + \App::abort(404, "The dataset or collection you were looking for could not be found (URI)."); } } diff --git a/app/Tdt/Core/Definitions/DefinitionController.php b/app/Tdt/Core/Definitions/DefinitionController.php index 2a11e1b4..9f9b5c10 100755 --- a/app/Tdt/Core/Definitions/DefinitionController.php +++ b/app/Tdt/Core/Definitions/DefinitionController.php @@ -9,6 +9,8 @@ use Tdt\Core\ApiController; use Tdt\Core\Repositories\Interfaces\DefinitionRepositoryInterface; use Config; +use File; +use ZipArchive; /** * DefinitionController @@ -27,7 +29,9 @@ public function __construct(DefinitionRepositoryInterface $definitions) } /** - * Create and Link Job (elasticsearch): Get the class without the namespace + * Return a class without the namespace + * + * @return string */ private function getClass($obj) { @@ -53,8 +57,8 @@ private function validateParameters($type, $short_name, $params) $rules = $type::getCreateValidators(); foreach ($create_params as $key => $info) { - if (!array_key_exists($key, $params)) { - if (!empty($info['required']) && $info['required']) { + if (! array_key_exists($key, $params)) { + if (! empty($info['required']) && $info['required']) { if (strtolower($type) != 'job') { \App::abort( 400, @@ -66,9 +70,8 @@ private function validateParameters($type, $short_name, $params) } $validated_params[$key] = @$info['default_value']; - } else { - if (!empty($rules[$key])) { + if (! empty($rules[$key])) { $validator = \Validator::make( array($key => $params[$key]), array($key => $rules[$key]) @@ -97,9 +100,9 @@ private function getClassOfType($params, $ns) $type = @$params['type']; $type = ucfirst(mb_strtolower($type)); - $class_name = $ns . "\\" . $type; + $class_name = $ns . '\\' . $type; - if (!class_exists($class_name)) { + if (! 
class_exists($class_name)) { \App::abort(400, "The given type ($type) is not a $ns type."); } @@ -116,7 +119,12 @@ private function getClassOfType($params, $ns) } /** - * Create and Link Job (elasticsearch): Create a new job + * Create and and return the job linked to the new definition + * + * @param string $uri The URI of the datasource + * @param array $input The input of the request + * + * @return integer The ID of the job */ public function createLinkJob($uri, $input) { @@ -131,28 +139,36 @@ public function createLinkJob($uri, $input) // Extract class construction $params = []; $params['extract']['type'] = $input['original-dataset-type']; - $params['extract']['uri'] = $input['uri']; - if ($params['extract']['type'] == "csv") { + if ($params['extract']['type'] == 'csv') { $params['extract']['delimiter'] = $input['delimiter']; $params['extract']['has_header_row'] = $input['has_header_row']; $params['extract']['encoding'] = 'UTF-8'; - } elseif ($params['extract']['type'] == "xml") { - $params['extract']['array_level']=$input['array_level']; + $params['extract']['uri'] = $input['uri']; + } elseif ($params['extract']['type'] == 'xml') { + $params['extract']['array_level'] = $input['array_level']; $params['extract']['encoding'] = 'UTF-8'; - } elseif ($params['extract']['type'] == "json") { - /* No extra fields */ + $params['extract']['uri'] = $input['uri']; + } elseif ($params['extract']['type'] == 'json') { + $params['extract']['uri'] = $input['uri']; + } elseif ($params['extract']['type'] = 'mysql') { + $params['extract']['database'] = $input['database']; + $params['extract']['host'] = $input['mysql_host']; + $params['extract']['port'] = @$input['mysql_port']; + $params['extract']['query'] = $input['query']; + $params['extract']['username'] = $input['mysql_username']; + $params['extract']['password'] = $input['mysql_password']; + $params['extract']['collation'] = @$input['collation']; } - // Load class construction (always elasticsearch) $params['load']['type'] = 'elasticsearch'; - $params['load']['host'] = $input['host']; - $params['load']['port'] = $input['port']; - $params['load']['es_index'] = $input['es_index']; - $params['load']['es_type'] = $collection_uri.'_'.$name; - $params['load']['username'] = $input['username']; - $params['load']['password'] = $input['password']; + $params['load']['host'] = \Config::get('database.connections.tdt_elasticsearch.host', 'localhost'); + $params['load']['port'] = \Config::get('database.connections.tdt_elasticsearch.port', 9200); + $params['load']['es_index'] = \Config::get('database.connections.tdt_elasticsearch.index', 'datatank'); + $params['load']['es_type'] = str_replace(' ', '_', trim($collection_uri) . '_' . trim($name)); + $params['load']['username'] = \Config::get('database.connections.tdt_elasticsearch.username', ''); + $params['load']['password'] = \Config::get('database.connections.tdt_elasticsearch.password', ''); // Add schedule $params['schedule'] = $input['schedule']; @@ -194,16 +210,9 @@ public function createLinkJob($uri, $input) $job_name = $job->collection_uri . '/' . $job->name; - \Queue::push(function ($queued_job) use ($job_name) { - \Artisan::call('input:execute', [ - 'jobname' => $job_name - ]); - - $queued_job->delete(); - }); + $this->addJobToQueue($job_name, $uri); return $job->id; - } /** @@ -216,7 +225,6 @@ private function editLinkedJob($uri, $input) // Set permission Auth::requirePermissions('definition.update'); - $job = \Job::whereRaw("? like CONCAT(collection_uri, '/', name , '/', '%')", array($uri . 
'/')) ->with('extractor', 'loader')->first(); @@ -228,234 +236,56 @@ private function editLinkedJob($uri, $input) // Extract class construction $params = []; $params['extract']['type'] = $input['original-dataset-type']; - $params['extract']['uri'] = $input['uri']; - - if ($params['extract']['type'] == "csv") { - $params['extract']['delimiter'] = $input['delimiter']; - $params['extract']['has_header_row'] = $input['has_header_row']; - $params['extract']['encoding'] = 'UTF-8'; - } elseif ($params['extract']['type'] == "xml") { - $params['extract']['array_level']=$input['array_level']; - $params['extract']['encoding'] = 'UTF-8'; - } elseif ($params['extract']['type'] == "json") { - /* No extra fields */ - } - - - // Load class construction (always elasticsearch) - $params['load']['type'] = 'elasticsearch'; - $params['load']['host'] = 'localhost'; - $params['load']['port'] = 9200; - $params['load']['es_index'] = ''; - $params['load']['es_type'] = $collection_uri.'_'.$name; - $params['load']['username'] = ''; - $params['load']['password'] = ''; - - // Add schedule - $params['schedule'] = $job->schedule; - - // Validate the job properties - $job_params = $this->validateParameters('Job', 'job', $params); - - // Check which parts are set for validation purposes - $extract = @$params['extract']; - $load = @$params['load']; - - // Check for every ETL part if the type is supported - $extractor = $this->getClassOfType(@$extract, 'Extract'); - $loader = $this->getClassOfType(@$load, 'Load'); - - $job->extractor()->delete(); - $job->loader()->delete(); - - $extractor->save(); - $loader->save(); - - // Add the validated job params - foreach ($job_params as $key => $value) { - $job->$key = $value; - } - - $job->extractor_id = $extractor->id; - $job->extractor_type = $this->getClass($extractor); - - $job->loader_id = $loader->id; - $job->loader_type = $this->getClass($loader); - $job->save(); - - // Push the job to the queue - $job_name = $job->collection_uri . '/' . $job->name; - - \Queue::push(function ($queued_job) use ($job_name) { - \Artisan::call('input:execute', ['jobname' => $job_name]); - - $queued_job->delete(); - }); - - $job->added_to_queue = true; - $job->save(); - - return $job->id; - } - - /** - * Create and Link Job (elasticsearch): Get the class without the namespace - */ - private function getClass($obj) - { - if (is_null($obj)) { - return null; - } - - $class_pieces = explode('\\', get_class($obj)); - $class = ucfirst(mb_strtolower(array_pop($class_pieces))); - - return implode('\\', $class_pieces) . '\\' . $class; - } - - /** - * Create and Link Job (elasticsearch): Validate the create parameters based on the rules of a certain job. - * If something goes wrong, abort the application and return a corresponding error message. - * - * @param string $type - * @param string $short_name - * @param array $params - * - * @return array - */ - private function validateParameters($type, $short_name, $params) - { - $validated_params = array(); - - $create_params = $type::getCreateProperties(); - $rules = $type::getCreateValidators(); - - foreach ($create_params as $key => $info) { - if (! array_key_exists($key, $params)) { - if (! empty($info['required']) && $info['required']) { - if (strtolower($type) != 'job') { - \App::abort( - 400, - "The parameter '$key' of the $short_name-part of the job configuration is required but was not passed." 
- ); - } else { - \App::abort(400, "The parameter '$key' is required to create a job but was not passed."); - } - } - - $validated_params[$key] = @$info['default_value']; - - } else { - if (! empty($rules[$key])) { - $validator = \Validator::make( - array($key => $params[$key]), - array($key => $rules[$key]) - ); - - if ($validator->fails()) { - \App::abort( - 400, - "The validation failed for parameter $key with value '$params[$key]', make sure the value is valid." - ); - } - } - - $validated_params[$key] = $params[$key]; - } - } - - return $validated_params; - } - - /** - * Create and Link Job (elasticsearch): Check if a given type of the ETL exists. - */ - private function getClassOfType($params, $ns) - { - $type = @$params['type']; - $type = ucfirst(mb_strtolower($type)); - - $class_name = $ns . '\\' . $type; - - if (! class_exists($class_name)) { - \App::abort(400, "The given type ($type) is not a $ns type."); - } - - $class = new $class_name(); - - // Validate the properties of the given type - $validated_params = $this->validateParameters($class, $type, $params); - - foreach ($validated_params as $key => $value) { - $class->$key = $value; - } - - return $class; - } - - /** - * Create and and return the job linked to the new definition - * - * @param string $uri The URI of the datasource - * @param array $input The input of the request - * - * @return integer The ID of the job - */ - public function createLinkJob($uri, $input) - { - // Set permission - Auth::requirePermissions('definition.create'); - - preg_match('/(.*)\/([^\/]*)$/', $uri, $matches); - - $collection_uri = @$matches[1]; - $name = @$matches[2]; - - // Extract class construction - $params = []; - $params['extract']['type'] = $input['original-dataset-type']; - $params['extract']['uri'] = $input['uri']; if ($params['extract']['type'] == 'csv') { $params['extract']['delimiter'] = $input['delimiter']; $params['extract']['has_header_row'] = $input['has_header_row']; $params['extract']['encoding'] = 'UTF-8'; + $params['extract']['uri'] = $input['uri']; } elseif ($params['extract']['type'] == 'xml') { $params['extract']['array_level'] = $input['array_level']; $params['extract']['encoding'] = 'UTF-8'; + $params['extract']['uri'] = $input['uri']; + } elseif ($params['extract']['type'] == 'json') { + $params['extract']['uri'] = $input['uri']; + } elseif ($params['extract']['type'] = 'mysql') { + $params['extract']['database'] = $input['database']; + $params['extract']['host'] = $input['mysql_host']; + $params['extract']['port'] = @$input['mysql_port']; + $params['extract']['query'] = $input['query']; + $params['extract']['username'] = $input['mysql_username']; + $params['extract']['password'] = $input['mysql_password']; + $params['extract']['collation'] = @$input['collation']; } - // Load class construction (always elasticsearch) $params['load']['type'] = 'elasticsearch'; $params['load']['host'] = \Config::get('database.connections.tdt_elasticsearch.host', 'localhost'); $params['load']['port'] = \Config::get('database.connections.tdt_elasticsearch.port', 9200); $params['load']['es_index'] = \Config::get('database.connections.tdt_elasticsearch.index', 'datatank'); - $params['load']['es_type'] = trim($collection_uri) . '_' . trim($name); - $params['load']['username'] = $input['username']; - $params['load']['password'] = $input['password']; + $params['load']['es_type'] = str_replace(' ', '_', trim($collection_uri) . '_' . 
trim($name)); + $params['load']['username'] = \Config::get('database.connections.tdt_elasticsearch.username', ''); + $params['load']['password'] = \Config::get('database.connections.tdt_elasticsearch.password', ''); // Add schedule - $params['schedule'] = $input['schedule']; + $params['schedule'] = $job->schedule; // Validate the job properties $job_params = $this->validateParameters('Job', 'job', $params); + // Check which parts are set for validation purposes $extract = @$params['extract']; $load = @$params['load']; - // Check for every emlp part if the type is supported + // Check for every ETL part if the type is supported $extractor = $this->getClassOfType(@$extract, 'Extract'); $loader = $this->getClassOfType(@$load, 'Load'); - // Save the emlp models + $job->extractor()->delete(); + $job->loader()->delete(); + $extractor->save(); $loader->save(); - // Create the job associated with emlp relations - $job = new \Job(); - $job->collection_uri = $collection_uri; - $job->name = $name; - // Add the validated job params foreach ($job_params as $key => $value) { $job->$key = $value; @@ -468,19 +298,13 @@ public function createLinkJob($uri, $input) $job->loader_type = $this->getClass($loader); $job->save(); - // Execute the job for a first time - $job->date_executed = time(); - $job->save(); - $job_name = $job->collection_uri . '/' . $job->name; - \Queue::push(function ($queued_job) use ($job_name) { - \Artisan::call('input:execute', [ - 'jobname' => $job_name - ]); + // Push the job to the queue + $this->addJobToQueue($job_name, $uri); - $queued_job->delete(); - }); + $job->added_to_queue = true; + $job->save(); return $job->id; } @@ -514,16 +338,24 @@ public function put($uri) $input['resource_name'] = @$matches[2]; // Add uploaded file and change uri. - if (isset($input['fileupload']) && $input['fileupload'] !='') { - $input['uri'] = 'file://'.$input['fileupload']; + if (isset($input['fileupload']) && $input['fileupload'] != '') { + $input['uri'] = 'file://' . $input['fileupload']; + } + + // Add uploaded file XSLT and change xslt_file. + if (isset($input['fileupload_xslt']) && $input['fileupload_xslt'] != '') { + $file2 = $input['fileupload_xslt']; + $file3 = explode('\\', $file2); + + $input['xslt_file'] = 'file://' . app_path() . '/storage/app/' . $file3[2] . '_' . date('Y-m-d') . '.xslt'; } // Check if dataset should be indexed if (isset($input['to_be_indexed']) && $input['to_be_indexed'] == 1) { - $input['es_type'] = $input['collection_uri'].'_'.$input['resource_name']; + $input['es_type'] = $input['collection_uri'] . '_' . $input['resource_name']; - //if a new job is stored and it needs to be indexed, set the draft flag to true - $input['draft_flag']= 1; + // if a new job is stored and it needs to be indexed, set the draft flag to true + $input['draft_flag'] = 1; } // Validate the input @@ -536,6 +368,7 @@ public function put($uri) } // Create the new definition + $input = $this->processZip($input); $definition = $this->definitions->store($input); // Check if dataset should be indexed: create job and link with previously created definition. @@ -543,22 +376,9 @@ public function put($uri) // Create new job $job_id = $this->createLinkJob($uri, $input); - //when a job is done, the definition needs to be checked, if the draft is set to true, set it to false. - $input['draft_flag']= 0; - - // Link job with definition through job_id column. 
$input['job_id'] = $job_id; - $definition = $this->definitions->update($uri, $input); // update previously created definition - } - // Check if dataset should be indexed: create job and link with previously created definition. - if (isset($input['to_be_indexed']) && $input['to_be_indexed'] == 1) { - // Create new job - $job_id = $this->createLinkJob($uri, $input); - - // Link job with definition through job_id column. - $input['job_id'] = $job_id; $definition = $this->definitions->update($uri, $input); // update previously created definition } @@ -575,6 +395,67 @@ public function put($uri) return $response; } + /** + * Check for any zip files as a URI for SHP data sources + * + * @param array $input + * @return array + */ + private function processZip($input) + { + $datasetType = @$input['original-dataset-type']; + + if (empty($input['original-dataset-type'])) { + $definition = \App::make('Tdt\Core\Repositories\Interfaces\DefinitionRepositoryInterface')->getByIdentifier($input['collection_uri'] . '/' . $input['resource_name']); + + $datasetType = $definition['source_type']; + } + + $datasetType = strtolower($datasetType); + + if ($datasetType == 'shp') { + // Check for a zip file as a URI + if (ends_with($input['uri'], '.zip')) { + $uri = $input['uri']; + $uri = str_replace('file://', '', $uri); + + $zip = new ZipArchive; + $success = $zip->open($uri); + + if ($success === true) { + $path = storage_path() . '/app/' . str_random(5); + + mkdir($path); + + $zip->extractTo($path); + $zip->close(); + + // Get the shp file in the new directory + $files = scandir($path); + $shp_file = ''; + + foreach ($files as $file) { + if (strlen($file) > 4) { + chmod($path . '/' . $file, 0655); + } + + if (ends_with($file, '.shp')) { + $shp_file = $file; + } + } + + if (! empty($shp_file)) { + $input['uri'] = $path . '/' . $shp_file; + } else { + throw new \Exception('No shape file was found in the zip archive.'); + } + } + } + } + + return $input; + } + /** * Delete a definition based on the URI given. */ @@ -613,6 +494,7 @@ public function patch($uri) $input['user_id'] = $definition['user_id']; $input['username'] = $definition['username']; + $input['xslt_file'] = $definition['xslt_file']; // Keep associated job $input['job_id'] = $definition['job_id']; @@ -622,18 +504,22 @@ public function patch($uri) $input['collection_uri'] = @$matches[1]; $input['resource_name'] = @$matches[2]; - // Add uploaded file and change uri. - // TODO: Validate file extension based on selected dataset/definition. - if(isset($input['fileupload']) && $input['fileupload'] !='') { - $input['uri'] = 'file://'.$input['fileupload']; - } // Add uploaded file and change uri. // TODO: Validate file extension based on selected dataset/definition. - if(isset($input['fileupload']) && $input['fileupload'] != '') { + if (isset($input['fileupload']) && $input['fileupload'] != '') { $input['uri'] = 'file://' . $input['fileupload']; } + //Add uploaded xslt file + if (isset($input['fileupload_xslt']) && $input['fileupload_xslt'] != '') { + + $file2 = $input['fileupload_xslt']; + $file3 = explode('\\', $file2); + + $input['xslt_file'] = 'file://' . app_path() . '/storage/app/' . $file3[2] . '_' . date('Y-m-d') . 
'.xslt';
+        }
+
         // Validate the input
         $validator = $this->definitions->getValidator($input);
 
@@ -642,6 +528,7 @@ public function patch($uri)
             \App::abort(400, $message);
         }
 
+        $input = $this->processZip($input);
         $this->definitions->update($uri, $input);
 
         // Dataset updates control
@@ -653,8 +540,9 @@ public function patch($uri)
         );
 
         // Check if dataset has a linked job (for updating purposes only if uri dataset field has been modified)
-        if ($definition['job_id'] != null && isset($input['fileupload']) && $input['fileupload'] !='') {
-            $input['original-dataset-type'] = strtolower(chop($definition['source_type'],"Definition"));
+        if ($definition['job_id'] != null && isset($input['fileupload']) && $input['fileupload'] != '') {
+            $input['original-dataset-type'] = strtolower(chop($definition['source_type'], 'Definition'));
+
             $job_id = $this->editLinkedJob($uri, $input);
         }
 
@@ -745,4 +633,32 @@ private function fetchInput()
 
         return $input;
     }
+
+    /**
+     * Execute a job for a definition
+     *
+     * @param string $job_name
+     * @param string $definition_uri
+     * @return void
+     */
+    private function addJobToQueue($job_name, $definition_uri)
+    {
+        $definitions = \App::make('Tdt\Core\Repositories\Interfaces\DefinitionRepositoryInterface');
+
+        $definition = $definitions->getByIdentifier($definition_uri);
+        $definition['draft_flag'] = true;
+        $definitions->update($definition_uri, $definition);
+
+        \Queue::push(function ($queued_job) use ($job_name, $definition_uri, $definitions) {
+            \Artisan::call('input:execute', [
+                'jobname' => $job_name
+            ]);
+
+            $definition = $definitions->getByIdentifier($definition_uri);
+            $definition['draft_flag'] = false;
+            $definitions->update($definition_uri, $definition);
+
+            $queued_job->delete();
+        });
+    }
 }
diff --git a/app/Tdt/Core/Formatters/CSVFormatter.php b/app/Tdt/Core/Formatters/CSVFormatter.php
index bface325..facae3c3 100644
--- a/app/Tdt/Core/Formatters/CSVFormatter.php
+++ b/app/Tdt/Core/Formatters/CSVFormatter.php
@@ -10,6 +10,14 @@
  * @author Pieter Colpaert
  * @author Michiel Vancoillie
  */
+
+use DOMDocument;
+use Log;
+use File;
+use XSLTProcessor;
+
+//use SoapBox\Formatter\Formatter;
+
 class CSVFormatter implements IFormatter
 {
     public static function createResponse($dataObj)
@@ -25,53 +33,74 @@ public static function createResponse($dataObj)
 
     public static function getBody($dataObj)
     {
+
         // Check if its a result of a SPARQL select query
         if ($dataObj->source_definition['type'] == 'SPARQL' && $dataObj->source_definition['query_type'] == 'select') {
            $dataObj->data = self::buildTableFromSparqlResult($dataObj->data);
         }
 
+        // XML format: not tabular, so transform the XML document with the configured XSLT file
         if (!is_array($dataObj->data)) {
-            \App::abort(400, "You can only request a CSV formatter on a tabular data structure.");
-        }
+
+            $name_xslt_file = $dataObj->source_definition['xslt_file'];
+
+            $xml = new DOMDocument();
+            $xml->loadXML($dataObj->data);
+
+            $xsl = new DOMDocument;
+            $xsl->load($name_xslt_file);
+
+
+            // Configure the XSLT processor
+            $proc = new XSLTProcessor;
+            $proc->importStyleSheet($xsl); // attach the XSL stylesheet rules
+
+            $csv = $proc->transformToXML($xml);
+
+            // custom error handler
+
+        } else {
 
         // Build the body
         $body = '';
         $header_printed = false;
 
-        foreach ($dataObj->data as $row) {
-            if (is_object($row)) {
-                $row = get_object_vars($row);
-            } elseif (!is_array($row)) {
-                $body .= $row . 
"\n"; + continue; + } + + // Print header + if (!$header_printed) { + $i = 0; + foreach ($row as $key => $value) { + $body .= CSVFormatter::enclose($key); + $body .= sizeof($row)-1 != $i ? ";" : "\n"; + $i++; + } + $header_printed = true; + } - // Print header - if (!$header_printed) { $i = 0; - foreach ($row as $key => $value) { - $body .= CSVFormatter::enclose($key); + foreach ($row as $element) { + if (is_object($element)) { + \App::abort(400, "You can only request a CSV formatter on a tabular datastructure."); + } elseif (is_array($element)) { + \App::abort(400, "You can only request a CSV formatter on a tabular datastructure."); + } else { + $body .= CSVFormatter::enclose($element); + } $body .= sizeof($row)-1 != $i ? ";" : "\n"; $i++; } - $header_printed = true; - } - - $i = 0; - foreach ($row as $element) { - if (is_object($element)) { - \App::abort(400, "You can only request a CSV formatter on a tabular datastructure."); - } elseif (is_array($element)) { - \App::abort(400, "You can only request a CSV formatter on a tabular datastructure."); - } else { - $body .= CSVFormatter::enclose($element); - } - $body .= sizeof($row)-1 != $i ? ";" : "\n"; - $i++; } + $csv=$body; } - return $body; + return $csv; } public static function getDocumentation() diff --git a/app/Tdt/Core/Formatters/FormatHelper.php b/app/Tdt/Core/Formatters/FormatHelper.php index cae2ba1d..99f63928 100644 --- a/app/Tdt/Core/Formatters/FormatHelper.php +++ b/app/Tdt/Core/Formatters/FormatHelper.php @@ -14,7 +14,7 @@ class FormatHelper /** * Return a list of the available formats that the data structure can be formatted into * - * @param Tdt\Core\Datasets\Data $data + * @param Tdt\Core\Datasets\Data $data * @return array */ public function getAvailableFormats($data) @@ -44,6 +44,9 @@ public function getFormatsForType($source_definition) $formats['KML'] = 'kml'; $formats['GeoJSON'] = 'geojson'; $formats['WKT'] = 'WKT'; + } elseif (isset($source_definition['xslt_file'])) { + $formats['CSV'] = 'csv'; + $formats['XML'] = 'xml'; } else { $formats['XML'] = 'xml'; } diff --git a/app/Tdt/Core/Formatters/KMLFormatter.php b/app/Tdt/Core/Formatters/KMLFormatter.php index 24b11cac..1ec054e7 100644 --- a/app/Tdt/Core/Formatters/KMLFormatter.php +++ b/app/Tdt/Core/Formatters/KMLFormatter.php @@ -2,8 +2,6 @@ namespace Tdt\Core\Formatters; -use Tdt\Core\Formatters\GeoJSONFormatter; - /** * KML Formatter * @@ -32,25 +30,25 @@ public static function createResponse($dataObj) public static function getBody($dataObj) { // Check if the original data is not GeoJSON - if ($dataObj->source_definition['type'] == 'XML' && !empty($dataObj->geo_formatted) && $dataObj->geo_formatted) { + if (($dataObj->source_definition['type'] == 'KML' || $dataObj->source_definition['type'] == 'XML') && ! 
empty($dataObj->source_definition['geo_formatted']) && $dataObj->source_definition['geo_formatted'] == 1) { return $dataObj->data; } self::$definition = $dataObj->definition; - self::$map_property = $dataObj->source_definition['map_property']; + self::$map_property = @$dataObj->source_definition['map_property']; // Build the body // KML header $body = ''; $body .= ''; - $body .= ""; + $body .= ''; // Add the document $body .= self::getPlacemarks($dataObj); - $body .= ""; + $body .= ''; // Close tags - $body .= ""; + $body .= ''; return $body; } @@ -71,18 +69,18 @@ private static function getPlacemarks($dataObj) private static function xmlgetelement($value) { - $result = ""; + $result = ''; return $result; } private static function getExtendedDataElement($values) { - $result = ""; + $result = ''; $ignore = ['parts', 'points', 'point']; foreach ($values as $key => $val) { - if (!in_array($key, $ignore)) { + if (! in_array($key, $ignore)) { $result .= ''; $result .= '' . $key . ''; $result .= '' . $val . ''; @@ -90,7 +88,7 @@ private static function getExtendedDataElement($values) } } - $result .= ""; + $result .= ''; return $result; } @@ -106,11 +104,11 @@ private static function printArray($val) $coords = array(); - if (!empty($array)) { - $coordskey = GeoHelper::keyExists("coords", $array); + if (! empty($array)) { + $coordskey = GeoHelper::keyExists('coords', $array); - if (!$coordskey) { - $coordskey = GeoHelper::keyExists("coordinates", $array); + if (! $coordskey) { + $coordskey = GeoHelper::keyExists('coordinates', $array); } if ($lat_long) { @@ -118,12 +116,12 @@ private static function printArray($val) $extendeddata = self::getExtendedDataElement($array); } elseif ($coordskey) { if (is_array($array[$coordskey])) { - if (!empty($array[$coordskey]['@text'])) { + if (! empty($array[$coordskey]['@text'])) { $array[$coordskey] = $array[$coordskey]['@text']; } } - $coords = explode(";", $array[$coordskey]); + $coords = explode(';', $array[$coordskey]); unset($array[$coordskey]); $name = self::xmlgetelement($array); $extendeddata = self::getExtendedDataElement($array); @@ -134,17 +132,17 @@ private static function printArray($val) if ($lat_long || count($coords) != 0) { $name = htmlspecialchars($key); - if (!empty(self::$map_property) && !empty($array[self::$map_property])) { + if (! empty(self::$map_property) && ! empty($array[self::$map_property])) { $name = $array[self::$map_property]; } $description = ''; - if (!empty($key) && is_numeric($key)) { - $description = "". \URL::to(self::$definition['collection_uri'] . '/' . self::$definition['resource_name']) . '/' . htmlspecialchars($key) ."]]>"; + if (! empty($key) && is_numeric($key)) { + $description = "" . \URL::to(self::$definition['collection_uri'] . '/' . self::$definition['resource_name']) . '/' . htmlspecialchars($key) . ']]>'; } - echo "" . $name . "" . $description . ""; + echo '' . $name . '' . $description . ''; echo $extendeddata; @@ -153,7 +151,7 @@ private static function printArray($val) $lat_val = $array[$lat_long[0]]; $lon_val = $array[$lat_long[1]]; - if (!empty($lat_long[2]) && !empty($array[$lat_long[2]])) { + if (! empty($lat_long[2]) && ! empty($array[$lat_long[2]])) { $z_val = $array[$lat_long[2]]; if (is_array($lat_val)) { @@ -169,7 +167,7 @@ private static function printArray($val) } if ($lat_val != 0 || $lon_val != 0) { - echo "" . $lon_val . "," . $lat_val . "," . $z_val . ""; + echo '' . $lon_val . ',' . $lat_val . ',' . $z_val . 
''; } } else { if (is_array($lat_val)) { @@ -181,23 +179,23 @@ private static function printArray($val) } if ($lat_val != 0 || $lon_val != 0) { - echo "" . $lon_val . "," . $lat_val . ""; + echo '' . $lon_val . ',' . $lat_val . ''; } } } - if (count($coords) > 0) { - if (count($coords) == 1) { - echo "" . $coords[0] . ""; + if (count($coords) > 0) { + if (count($coords) == 1) { + echo '' . $coords[0] . ''; } else { - echo ""; + echo ''; foreach ($coords as $coord) { - echo "" . $coord . ""; + echo '' . $coord . ''; } - echo ""; + echo ''; } } - echo ""; + echo ''; } } } @@ -209,7 +207,7 @@ private static function printArray($val) */ private static function getArray($dataObj, $geo) { - $body = ""; + $body = ''; $data = $dataObj->data; @@ -227,33 +225,33 @@ private static function getArray($dataObj, $geo) // We assume that if longitude exists, latitude does as well if the geometry is a single point // A point can either be a single column value, or split up in a latitude and longitude $geo_type = 'point'; - $is_point = (count($geo) > 1) || !empty($geo['point']); + $is_point = (count($geo) > 1) || ! empty($geo['point']); - if (!$is_point) { + if (! $is_point) { $geo_type = key($geo); $column_name = $geo[$geo_type]; } - if (!empty($entry)) { + if (! empty($entry)) { $name = htmlspecialchars($key); - if (!empty($entry['name'])) { + if (! empty($entry['name'])) { $name = $entry['name']; } - if (!empty(self::$map_property) && !empty($entry[self::$map_property])) { + if (! empty(self::$map_property) && ! empty($entry[self::$map_property])) { $name = $entry[self::$map_property]; } $extendeddata = self::getExtendedDataElement($entry); - $description = ""; + $description = ''; - if (!empty($key) && is_numeric($key)) { - $description = "definition['collection_uri'] . '/' . $dataObj->definition['resource_name']) . '/' . htmlspecialchars($key) . ".map'>". \URL::to($dataObj->definition['collection_uri'] . '/' . $dataObj->definition['resource_name']) . '/' . htmlspecialchars($key) ."]]>"; + if (! empty($key) && is_numeric($key)) { + $description = "definition['collection_uri'] . '/' . $dataObj->definition['resource_name']) . '/' . htmlspecialchars($key) . ".map'>" . \URL::to($dataObj->definition['collection_uri'] . '/' . $dataObj->definition['resource_name']) . '/' . htmlspecialchars($key) . ']]>'; } - $body .= "" . $name . "" . $description . ""; + $body .= '' . $name . '' . $description . ''; $body .= $extendeddata; if ($is_point) { @@ -265,40 +263,40 @@ private static function getArray($dataObj, $geo) $point = $entry[$geo['point']]; } - $body .= "" . $point . ""; + $body .= '' . $point . ''; } else { switch ($geo_type) { case 'polylinez': - $body .= ""; + $body .= ''; foreach (explode(';', $entry[$geo['polylinez']]) as $coord) { - $body .= "" . $coord . ""; + $body .= '' . $coord . ''; } - $body .= ""; + $body .= ''; break; case 'polyline': - $body .= ""; + $body .= ''; foreach (explode(';', $entry[$geo['polyline']]) as $coord) { - $body .= "" . $coord . ""; + $body .= '' . $coord . ''; } - $body .= ""; + $body .= ''; break; case 'polygonz': - $body .= "". $entry[$geo['polygonz']] . ""; + $body .= '' . $entry[$geo['polygonz']] . ''; break; case 'polygon': - $body .= "". $entry[$geo['polygon']] . ""; + $body .= '' . $entry[$geo['polygon']] . ''; break; case 'multipoinz': - $body .= ""; + $body .= ''; foreach (explode(';', $entry[$geo['multipointz']]) as $point) { $body .= '' . $point . 
''; } $body .= ''; break; case 'multipoint': - $body .= ""; + $body .= ''; foreach (explode(';', $entry[$geo['multipoint']]) as $point) { $body .= '' . $point . ''; } @@ -309,7 +307,7 @@ private static function getArray($dataObj, $geo) break; } } - $body .= ""; + $body .= ''; } } @@ -318,6 +316,6 @@ private static function getArray($dataObj, $geo) public static function getDocumentation() { - return "Returns a KML file with geo properties of the data."; + return 'Returns a KML file with geo properties of the data.'; } } diff --git a/app/Tdt/Core/Formatters/XMLFormatter.php b/app/Tdt/Core/Formatters/XMLFormatter.php index c896927d..5117239e 100644 --- a/app/Tdt/Core/Formatters/XMLFormatter.php +++ b/app/Tdt/Core/Formatters/XMLFormatter.php @@ -2,8 +2,8 @@ namespace Tdt\Core\Formatters; -define("NUMBER_TAG_PREFIX", "_"); -define("DEFAULT_ROOTNAME", "data"); +define('NUMBER_TAG_PREFIX', '_'); +define('DEFAULT_ROOTNAME', 'data'); /** * XML Formatter @@ -37,6 +37,11 @@ public static function createResponse($dataObj) public static function getBody($dataObj) { + // Check if the original data is not GeoJSON + if ($dataObj->source_definition['type'] == 'KML' || $dataObj->source_definition['type'] == 'XML') { + return $dataObj->data; + } + // Rootname equals resource name $rootname = 'root'; @@ -45,7 +50,7 @@ public static function getBody($dataObj) // Check if a configuration is given $conf = array(); - if (!empty($dataObj->semantic->conf)) { + if (! empty($dataObj->semantic->conf)) { $conf = $dataObj->semantic->conf; } @@ -111,14 +116,14 @@ private static function transformToXML($data, $nameobject) self::$isRootElement = false; foreach (self::$prefixes as $prefix => $uri) { - $object .= " xmlns:" . $prefix . "=\"$uri\""; + $object .= ' xmlns:' . $prefix . "=\"$uri\""; } } - $object .=">"; + $object .= '>'; // Check for attributes - if (!empty($data['@attributes'])) { + if (! empty($data['@attributes'])) { $attributes = $data['@attributes']; @@ -131,7 +136,7 @@ private static function transformToXML($data, $nameobject) $name = self::getFullName($name); - $object .= " " . $name . '=' . '"' . htmlspecialchars($value) . '"'; + $object .= ' ' . $name . '=' . '"' . htmlspecialchars($value) . '"'; } $object .= '>'; @@ -206,8 +211,6 @@ private static function transformToXML($data, $nameobject) $object .= ""; } - - return $object; } @@ -230,6 +233,6 @@ private static function isAssociative($arr) public static function getDocumentation() { - return "Prints plain old XML. Watch out for tags starting with an integer: an underscore will be added."; + return 'Prints plain old XML. Watch out for tags starting with an integer: an underscore will be added.'; } } diff --git a/app/Tdt/Core/Repositories/DefinitionRepository.php b/app/Tdt/Core/Repositories/DefinitionRepository.php index 519b1d7f..7f36fa20 100644 --- a/app/Tdt/Core/Repositories/DefinitionRepository.php +++ b/app/Tdt/Core/Repositories/DefinitionRepository.php @@ -103,6 +103,25 @@ public function store(array $input) // Update the facets for the definition $this->updateFacets($definition); + // Create "linked to" datasets + $linked_to_codes = array(); + foreach ($input as $key => $value) { + if (strpos($key, 'linkedto_id') === 0 && ! empty($value)) { + $linked_to_codes[] = preg_replace('/\D/', '', $key); + } + } + + if (! empty($linked_to_codes)) { + $linked_definitions_list = array(); + + foreach ($linked_to_codes as $key => $value) { + $linked_definitions_list[$input['linkedto_id' . $value]] = ['description' => $input['linkedto_desc' . 
$value]]; + } + + $definition->linkedTo()->sync($linked_definitions_list); + } + // End Create "linked to" datasets + return $definition->toArray(); } @@ -189,6 +208,27 @@ public function update($identifier, array $input) $definition_model->save(); + // Update "linked to" datasets + $linked_to_codes = array(); + foreach ($input as $key => $value) { + if (strpos($key, 'linkedto_id') === 0 && ! empty($value)) { + $linked_to_codes[] = preg_replace('/\D/', '', $key); + } + } + + if (! empty($linked_to_codes)) { + $linked_definitions_list = array(); + + foreach ($linked_to_codes as $key => $value) { + $linked_definitions_list[$input['linkedto_id' . $value]] = ['description' => $input['linkedto_desc' . $value]]; + } + + $definition_model->linkedTo()->sync($linked_definitions_list); + } else { + $definition_model->linkedTo()->detach(); + } + // End Update "linked to" datasets + return $definition_model->toArray(); } @@ -210,6 +250,9 @@ public function delete($identifier) $attribution->delete(); } + // Delete "links" to/from this dataset + $definition->linkedFrom()->detach(); + $definition->linkedTo()->detach(); return $definition->delete(); } } @@ -519,6 +562,7 @@ public function getDescriptionInfo($identifier) // Get the formats based on the source definition meta-data $format_helper = new FormatHelper(); + $formats = $format_helper->getFormatsForType($source_definition->toArray()); $properties['formats'] = $formats; @@ -817,24 +861,6 @@ public function getCreateParameters() 'type' => 'integer', 'description' => 'draft flag', ), - 'job_id' => array( - 'required' => false, - 'name' => 'Related job', - 'type' => 'integer', - 'description' => 'Job linked to this dataset.', - ), - 'original_file' => array( - 'required' => false, - 'name' => 'Original file', - 'type' => 'string', - 'description' => 'Original dataset file.', - ), - 'draft_flag' => array( - 'required' => false, - 'name' => 'draft flag', - 'type' => 'integer', - 'description' => 'draft flag', - ), 'user_id' => array( 'required' => true, 'name' => 'User id', @@ -847,6 +873,12 @@ public function getCreateParameters() 'type' => 'string', 'description' => 'User (username) who created this dataset.', ), + 'xslt_file' => array( + 'required' => false, + 'name' => 'xslt_file', + 'type' => 'string', + 'description' => 'XSLT file.', + ), 'keywords' => array( 'required' => false, 'requiredgeodcat' => 'required', diff --git a/app/Tdt/Core/Repositories/MysqlDefinitionRepository.php b/app/Tdt/Core/Repositories/MysqlDefinitionRepository.php index 0b868081..75b03437 100644 --- a/app/Tdt/Core/Repositories/MysqlDefinitionRepository.php +++ b/app/Tdt/Core/Repositories/MysqlDefinitionRepository.php @@ -49,58 +49,66 @@ public function getAllParameters() protected function extractColumns($input) { - $db_config = array( - 'driver' => 'mysql', - 'host' => $input['host'], - 'database' => $input['database'], - 'username' => $input['username'], - 'password' => $input['password'], - 'charset' => 'utf8', - 'collation' => $input['collation'], - ); - - // Configure a connection - \Config::set('database.connections.testconnection', $db_config); - - // Make a database connection - $db = \DB::connection('testconnection'); + try { + $db_config = array( + 'driver' => 'mysql', + 'host' => $input['mysql_host'], + 'database' => $input['database'], + 'username' => $input['mysql_username'], + 'password' => $input['mysql_password'], + 'charset' => 'utf8', + 'collation' => $input['collation'], + 'port' => $input['mysql_port'], + ); + + // Configure a connection + 
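+            // Note: 'testconnection' is a temporary runtime connection used only to probe the query and
+            // extract its columns; the surrounding try/catch logs any failure and rethrows it with a
+            // human-readable message instead of letting the request fail with an unhandled error.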
\Config::set('database.connections.testconnection', $db_config); + + // Make a database connection + $db = \DB::connection('testconnection'); + + // Get the schema builder of the database connection + $schema = $db->getSchemaBuilder(); + $connection = $schema->getConnection(); + $result = $connection->selectOne($input['query']); + + if (empty($result)) { + \App::abort(400, 'The query did not return any results.'); + } - // Get the schema builder of the database connection - $schema = $db->getSchemaBuilder(); - $connection = $schema->getConnection(); - $result = $connection->selectOne($input['query']); + $db_columns = array_keys((array)$result); - if (empty($result)) { - \App::abort(400, 'The query did not return any results.'); - } + $columns_info = @$config['columns']; + $pk = @$config['pk']; - $db_columns = array_keys((array)$result); + // Prepare the aliases + $aliases = array(); - $columns_info = @$config['columns']; - $pk = @$config['pk']; + if (! empty($columns_info)) { + foreach ($columns_info as $column_info) { + $aliases[$column_info['index']] = $column_info['column_name_alias']; + } + } - // Prepare the aliases - $aliases = array(); + // Create the columns array + $columns = array(); - if (!empty($columns_info)) { - foreach ($columns_info as $column_info) { - $aliases[$column_info['index']] = $column_info['column_name_alias']; + foreach ($db_columns as $index => $column) { + array_push($columns, array( + 'index' => $index, + 'column_name' => $column, + 'column_name_alias' => empty($aliases[$index]) ? $column : $aliases[$index], + 'pk' => ($pk === $index) + )); } - } - // Create the columns array - $columns = array(); + return $columns; + } catch (\Exception $ex) { + \Log::error('Something went wrong while extracting columns from the query results, or the query itself'); + \Log::error($ex->getMessage()); - foreach ($db_columns as $index => $column) { - array_push($columns, array( - 'index' => $index, - 'column_name' => $column, - 'column_name_alias' => empty($aliases[$index]) ? $column : $aliases[$index], - 'pk' => ($pk === $index) - )); + throw new \Exception('Something went wrong while connecting to the database, make sure the application can reach the database with the given credentials and that the query is valid. The technical error we got was: ' . $ex->getMessage()); } - - return $columns; } /** @@ -109,13 +117,13 @@ protected function extractColumns($input) public function getCreateParameters() { return array( - 'host' => array( + 'mysql_host' => array( 'required' => true, 'name' => 'Host', 'description' => 'The host of the MySQL database.', 'type' => 'string', ), - 'port' => array( + 'mysql_port' => array( 'required' => false, 'name' => 'Port', 'description' => 'The port of the MySQL database where a connection can be made to.', @@ -128,13 +136,13 @@ public function getCreateParameters() 'description' => 'The name of the database where the datatable, that needs to be published, resides.', 'type' => 'string', ), - 'username' => array( + 'mysql_username' => array( 'required' => true, 'name' => 'Username', 'description' => 'A username that has read permissions on the provided datatable. 
Safety first, make sure the user only has read permissions.', 'type' => 'string', ), - 'password' => array( + 'mysql_password' => array( 'required' => false, 'name' => 'Password', 'description' => 'The password for the user that has read permissions.', diff --git a/app/Tdt/Core/Repositories/ShpDefinitionRepository.php b/app/Tdt/Core/Repositories/ShpDefinitionRepository.php index c86e1486..1a0ba649 100644 --- a/app/Tdt/Core/Repositories/ShpDefinitionRepository.php +++ b/app/Tdt/Core/Repositories/ShpDefinitionRepository.php @@ -58,7 +58,7 @@ public function getCreateParameters() 'required' => false, 'name' => 'EPSG code', 'description' => 'This parameter holds the EPSG code in which the geometric properties in the shape file are encoded.', - 'default_value' => "4326", + 'default_value' => '4326', 'type' => 'list', 'list' => 'api/geoprojections', 'list_option' => 'epsg', diff --git a/app/Tdt/Core/Repositories/TabularBaseRepository.php b/app/Tdt/Core/Repositories/TabularBaseRepository.php index 93c76e45..d35a17ec 100644 --- a/app/Tdt/Core/Repositories/TabularBaseRepository.php +++ b/app/Tdt/Core/Repositories/TabularBaseRepository.php @@ -25,7 +25,7 @@ public function store(array $input) $columns = array(); - if (!empty($input['columns'])) { + if (! empty($input['columns'])) { $columns = $input['columns']; } @@ -37,11 +37,11 @@ public function store(array $input) $geo = array(); - if (!empty($input['geo'])) { + if (! empty($input['geo'])) { $geo = $input['geo']; } - if (!empty($geo) || !empty($extracted_geo)) { + if (! empty($geo) || ! empty($extracted_geo)) { $geo = $this->geo_repository->validateBulk($extracted_geo, $geo); } @@ -59,7 +59,7 @@ public function store(array $input) // Store the columns and optional geo meta-data $this->tabular_repository->storeBulk($tabular_definition->id, $model_name, $columns); - if (!empty($geo)) { + if (! empty($geo)) { $this->geo_repository->storeBulk($tabular_definition->id, $model_name, $geo); } @@ -92,13 +92,13 @@ public function update($tabular_id, array $input) $geo = array(); - if (!empty($input['geo'])) { + if (! empty($input['geo'])) { $geo = $input['geo']; } - if (!empty($geo) || !empty($extracted_geo)) { + if (! empty($geo) || ! empty($extracted_geo)) { $geo = $this->geo_repository->validateBulk($extracted_geo, $geo); - } elseif (!isset($geo)) { + } elseif (! isset($geo)) { $geo = $this->geo_repository->getGeoProperties($tabular_id, $model_name); } @@ -115,14 +115,14 @@ public function update($tabular_id, array $input) // Check for a primary key, and add it to the columns $pk = @$input['pk']; - if (!is_null($pk) && is_numeric($pk) && $pk >= 0 && $pk < count($columns)) { + if (! is_null($pk) && is_numeric($pk) && $pk >= 0 && $pk < count($columns)) { $columns[$pk]['is_pk'] = 1; } // Store the columns and geo meta-data $this->tabular_repository->storeBulk($tabular_id, $model_name, $columns); - if (!empty($geo)) { + if (! 
empty($geo)) { $this->geo_repository->storeBulk($tabular_id, $model_name, $geo); } @@ -141,7 +141,7 @@ private function getModelName() * * column: index, is_pk, column_name, column_name_alias * - * @param array $input + * @param array $input * @return array columns */ abstract protected function extractColumns($input); @@ -149,8 +149,8 @@ abstract protected function extractColumns($input); /** * Process the columns and return geo properties * - * @param array $input - * @param array $columns + * @param array $input + * @param array $columns * @return array geo properties */ protected function extractGeoProperties($input, $columns) diff --git a/app/Tdt/Core/Repositories/XlsDefinitionRepository.php b/app/Tdt/Core/Repositories/XlsDefinitionRepository.php index bfb0ea38..9b0244b3 100644 --- a/app/Tdt/Core/Repositories/XlsDefinitionRepository.php +++ b/app/Tdt/Core/Repositories/XlsDefinitionRepository.php @@ -59,6 +59,7 @@ public function getCreateParameters() 'name' => 'XLS sheet', 'description' => 'The sheet name in which the tabular data resides.', 'type' => 'string', + 'default_value' => 'Sheet1' ), 'has_header_row' => array( 'required' => false, diff --git a/app/Tdt/Core/Repositories/XmlDefinitionRepository.php b/app/Tdt/Core/Repositories/XmlDefinitionRepository.php index f0fd6907..c346bcc4 100644 --- a/app/Tdt/Core/Repositories/XmlDefinitionRepository.php +++ b/app/Tdt/Core/Repositories/XmlDefinitionRepository.php @@ -8,7 +8,7 @@ class XmlDefinitionRepository extends BaseDefinitionRepository implements XmlDef { protected $rules = array( - 'uri' => 'uri|required', + 'uri' => 'uri|required|xml', 'description' => 'required', ); @@ -27,6 +27,7 @@ public function getCreateParameters() 'required' => true, 'name' => 'URI', 'description' => 'The location of the XML file, this should either be a URL or a local file location.', + 'description_xslt' => '(OPTIONAL) The location of the XSLT file, this should either be a URL or a local file location.', 'type' => 'string', ), 'title' => array( diff --git a/app/Tdt/Core/Tests/Repositories/XlsDefinitionRepositoryTest.php b/app/Tdt/Core/Tests/Repositories/XlsDefinitionRepositoryTest.php index f59ef51d..9f554d12 100644 --- a/app/Tdt/Core/Tests/Repositories/XlsDefinitionRepositoryTest.php +++ b/app/Tdt/Core/Tests/Repositories/XlsDefinitionRepositoryTest.php @@ -3,7 +3,6 @@ namespace Tdt\Core\Tests\Repositories; use Tdt\Core\Tests\TestCase; -use Symfony\Component\HttpFoundation\Request; class XlsDefinitionRepositoryTest extends TestCase { @@ -20,10 +19,8 @@ class XlsDefinitionRepositoryTest extends TestCase public function testPut() { - // Publish each XLS file in the test csv data folder. foreach ($this->test_data as $entry) { - $file = $entry['file']; $extension = $entry['extension']; $sheet = $entry['sheet']; @@ -58,7 +55,6 @@ public function testGet() $this->assertEquals(count($this->test_data), count($all)); foreach ($all as $xls_definition) { - // Test the getById $xls_definition_clone = $xls_repository->getById($xls_definition['id']); @@ -67,7 +63,6 @@ public function testGet() // Test against the properties we've stored foreach ($this->test_data as $entry) { - $file = $entry['file']; $extension = $entry['extension']; $sheet = $entry['sheet']; @@ -88,7 +83,6 @@ public function testUpdate() $all = $xls_repository->getAll(); foreach ($all as $xls_definition) { - $updated_description = 'An updated description for object with description: ' . 
$xls_definition['description']; $updated_definition = $xls_repository->update($xls_definition['id'], array('description' => $updated_description)); @@ -105,7 +99,6 @@ public function testDelete() $all = $xls_repository->getAll(); foreach ($all as $xls_definition) { - $result = $xls_repository->delete($xls_definition['id']); $this->assertTrue($result); diff --git a/app/Tdt/Core/Ui/DatasetController.php b/app/Tdt/Core/Ui/DatasetController.php index 127db9c1..a0aae279 100755 --- a/app/Tdt/Core/Ui/DatasetController.php +++ b/app/Tdt/Core/Ui/DatasetController.php @@ -14,7 +14,6 @@ class DatasetController extends UiController { - /** * Admin.dataset.view */ @@ -23,39 +22,45 @@ public function getIndex() // Set permission Auth::requirePermissions('admin.dataset.view'); - // Check user id - $user = \Sentry::getUser(); - + // Check user id + $user = \Sentry::getUser(); + // Get created definitions - //$definitions = \Definition::all(); - $definitions = \Definition::where('user_id', $user->id)->get(); - + $definitions = \Definition::where('user_id', $user->id)->get(); + // Get updated definitions - $defupdated_ids = \DB::table('definitions_updates') - ->join('definitions', 'definitions.username', '=', 'definitions_updates.username') - ->where('definitions_updates.username', $user->email) - ->select('definitions_updates.definition_id') + $updatedDefinitions = \DB::table('definitions_updates') + ->where('definitions_updates.user_id', $user->id) + ->select('definitions_updates.definition_id') ->get(); - - $updateddeflist = array(); - foreach ($defupdated_ids as $defid) { - $updateddeflist[] = $defid->definition_id; - } - $definitions_updated = null; - if (!empty($updateddeflist)){ - $definitions_updated = \Definition::whereIn('id', $updateddeflist) - ->get(); - } - - // Get other definitions - $definitions_others = \Definition::where('user_id', '!=' , $user->id)->get(); - + $updatedDefinitionIds = []; + + foreach ($updatedDefinitions as $updatedDefinition) { + $updatedDefinitionIds[] = $updatedDefinition->definition_id; + } + + $definitions_updated = null; + + if (! empty($updatedDefinitionIds)) { + $definitions_updated = \Definition::whereIn('id', $updatedDefinitionIds) + ->get(); + } + + // Get other definitions + $otherDefinitionsQuery = \Definition::where('user_id', '!=', $user->id); + + if (! empty($updatedDefinitionIds)) { + $otherDefinitionsQuery->whereNotIn('id', $updatedDefinitionIds); + } + + $definitions_others = $otherDefinitionsQuery->get(); + return \View::make('ui.datasets.list') ->with('title', 'Dataset management (Created/Updated/Others) | The Datatank') ->with('definitions', $definitions) - ->with('definitions_updated', $definitions_updated) - ->with('definitions_others', $definitions_others); + ->with('definitions_updated', $definitions_updated) + ->with('definitions_others', $definitions_others); } /** @@ -87,7 +92,7 @@ public function getAdd() // Filter array type parameters if (empty($object->parameters)) { // Filter Dublin core parameters - if (!empty($object->group) && $object->group == 'dc') { + if (! empty($object->group) && $object->group == 'dc') { // Fetch autocomplete DC fields if ($object->type == 'list') { $uri = $object->list; @@ -98,7 +103,7 @@ public function getAdd() $data_set = array(); foreach ($data as $o) { - if (!empty($o->{$object->list_option})) { + if (! 
empty($o->{$object->list_option})) { $data_set[] = $o->{$object->list_option}; } } @@ -112,7 +117,7 @@ public function getAdd() $parameters_dc[$parameter] = $object; - } elseif (!empty($object->group) && $object->group == 'geodcat') { + } elseif (! empty($object->group) && $object->group == 'geodcat') { // Filter Geo params $parameters_geodcat[$parameter] = $object; } else { @@ -128,7 +133,7 @@ public function getAdd() $data_set = array(); foreach ($data as $o) { - if (!empty($o->{$object->list_option})) { + if (! empty($o->{$object->list_option})) { $data_set[] = $o->{$object->list_option}; } } @@ -169,10 +174,11 @@ public function getAdd() // TODO special treatment for caching unset($parameters_optional['draft']); - unset($parameters_optional['draft_flag']); - unset($parameters_required['username']); - unset($parameters_required['user_id']); - unset($parameters_optional['job_id']); + unset($parameters_optional['draft_flag']); + unset($parameters_required['username']); + unset($parameters_required['user_id']); + unset($parameters_optional['job_id']); + unset($parameters_optional['xslt_file']); // Translate the parameters $parameters_required = $this->translateParameters($parameters_required, $mediatype); @@ -197,7 +203,6 @@ public function getAdd() return \Response::make($view); } - /** * Admin.dataset.update */ @@ -226,12 +231,12 @@ public function getEdit($id) $parameters_dc = array(); $parameters_geodcat = array(); $lists = array(); - + foreach ($mediatype->parameters as $parameter => $object) { // Filter array type parameters if (empty($object->parameters)) { // Filter Dublin core parameters - if (!empty($object->group) && $object->group == 'dc') { + if (! empty($object->group) && $object->group == 'dc') { // Fetch autocomplete DC fields if ($object->type == 'list') { $uri = $object->list; @@ -242,7 +247,7 @@ public function getEdit($id) $data_set = array(); foreach ($data as $o) { - if (!empty($o->{$object->list_option})) { + if (! empty($o->{$object->list_option})) { $data_set[] = $o->{$object->list_option}; } } @@ -255,7 +260,7 @@ public function getEdit($id) } $parameters_dc[$parameter] = $object; - } elseif (!empty($object->group) && $object->group == 'geodcat') { + } elseif (! empty($object->group) && $object->group == 'geodcat') { // Filter Geo params $parameters_geodcat[$parameter] = $object; } else { @@ -272,7 +277,7 @@ public function getEdit($id) $data_set = array(); foreach ($data as $o) { - if (!empty($o->{$object->list_option})) { + if (! 
empty($o->{$object->list_option})) { $data_set[] = $o->{$object->list_option}; } } @@ -292,18 +297,19 @@ public function getEdit($id) // Filter on unnecessary optional parameters unset($parameters_optional['cache_minutes']); unset($parameters_optional['draft']); - unset($parameters_optional['draft_flag']); - unset($parameters_optional['username']); - unset($parameters_optional['user_id']); - unset($parameters_optional['job_id']); - - // Get dataset updates information - $updates_info = \DB::table('definitions_updates') - ->where('definition_id', $id) - ->select('username','updated_at') - ->orderBy('updated_at', 'desc') - ->limit(10) - ->get(); + unset($parameters_optional['draft_flag']); + unset($parameters_optional['username']); + unset($parameters_optional['user_id']); + unset($parameters_optional['job_id']); + unset($parameters_optional['xslt_file']); + + // Get dataset updates information + $updates_info = \DB::table('definitions_updates') + ->where('definition_id', $id) + ->select('username','updated_at') + ->orderBy('updated_at', 'desc') + ->limit(10) + ->get(); return \View::make('ui.datasets.edit') ->with('title', 'Edit a dataset | The Datatank') @@ -314,7 +320,7 @@ public function getEdit($id) ->with('parameters_dc', $parameters_dc) ->with('parameters_geodcat', $parameters_geodcat) ->with('source_definition', $source_definition) - ->with('updates_info', $updates_info); + ->with('updates_info', $updates_info); return \Response::make($view); } else { @@ -327,17 +333,15 @@ public function getEdit($id) */ public function getDelete($id) { - //\App::abort(400, "Deleting dataset."); - // Set permission Auth::requirePermissions('admin.dataset.delete'); if (is_numeric($id)) { $definition = \Definition::find($id); if ($definition) { - // Delete definition updates - \DB::table('definitions_updates')->where('definition_id', $id)->delete(); - + // Delete definition updates + \DB::table('definitions_updates')->where('definition_id', $id)->delete(); + // Delete it (with cascade) $definition->delete(); } @@ -401,4 +405,40 @@ private function translateParameters($parameters, $media_type) return $translatedParameters; } + + /** + * Autocomplete endpoint "Linking Datasets" + * + * @return json + */ + public function autocompleteLinkedDatasets(){ + $term = \Input::get('term'); + $currentdef_id = \Input::get('currentdef_id'); + + $results = array(); + + if (isset($currentdef_id)) { // Editing an existing dataset + $queries = \DB::table('definitions') + ->where('title', 'LIKE', '%' . $term . '%') + ->orWhere('description', 'LIKE', '%' . $term . '%') + ->orWhere('resource_name', 'LIKE', '%' . $term . '%') + ->orWhere('collection_uri', 'LIKE', '%' . $term . '%') + ->having('id', '!=', $currentdef_id) + ->get(); + } else { // Creating a new dataset + $queries = \DB::table('definitions') + ->where('title', 'LIKE', '%' . $term . '%') + ->orWhere('description', 'LIKE', '%' . $term . '%') + ->orWhere('resource_name', 'LIKE', '%' . $term . '%') + ->orWhere('collection_uri', 'LIKE', '%' . $term . 
'%') + ->get(); + } + + foreach ($queries as $query) + { + $results[] = [ 'id' => $query->id, 'value' => $query->title ]; + } + + return \Response::json($results); + } } diff --git a/app/Tdt/Core/Validators/CustomValidator.php b/app/Tdt/Core/Validators/CustomValidator.php index 222ee3b3..40dc30c6 100644 --- a/app/Tdt/Core/Validators/CustomValidator.php +++ b/app/Tdt/Core/Validators/CustomValidator.php @@ -18,15 +18,17 @@ public function validateUri($attribute, $value, $parameters) try { $url_pieces = parse_url($value); - if (!filter_var($value, FILTER_VALIDATE_URL) === false && ($url_pieces['scheme'] == 'http' || $url_pieces['scheme'] == 'https')) { + if (! filter_var($value, FILTER_VALIDATE_URL) === false && ($url_pieces['scheme'] == 'http' || $url_pieces['scheme'] == 'https')) { $status = $this->getHeadInfo($value); - return $status == 200; + + return $status < 400 && $status >= 200; } else { - $data =@ file_get_contents($value); + $data = @ file_get_contents($value); - return !empty($data); + return ! empty($data); } } catch (\Exception $ex) { + \Log::error($ex->getMessage()); return false; } } @@ -49,7 +51,7 @@ private function getHeadInfo($uri) $status = curl_getinfo($c); curl_close($c); - if (!empty($status['http_code'])) { + if (! empty($status['http_code'])) { return $status['http_code']; } else { return 500; @@ -65,7 +67,7 @@ private function getRemoteData($url) curl_setopt($c, CURLOPT_SSL_VERIFYHOST, false); curl_setopt($c, CURLOPT_SSL_VERIFYPEER, false); curl_setopt($c, CURLOPT_MAXREDIRS, 10); - $follow_allowed = ( ini_get('open_basedir') || ini_get('safe_mode')) ? false:true; + $follow_allowed = ( ini_get('open_basedir') || ini_get('safe_mode')) ? false : true; if ($follow_allowed) { curl_setopt($c, CURLOPT_FOLLOWLOCATION, 1); @@ -83,6 +85,40 @@ private function getRemoteData($url) return $data; } + /** + * Validate an XML string + * + * @param string $attribute + * @param string $value + * @param array $parameters + * @return bool + */ + public function validateXml($attribute, $value, $parameters) + { + $xml_string = ''; + + if (substr($value, 0, 4) == 'http') { + $curl = curl_init(); + curl_setopt($curl, CURLOPT_URL, $value); + curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1); + + $xml_string = curl_exec($curl); + + curl_close($curl); + } else { + $xml_string = file_get_contents($value); + } + + try { + if (simplexml_load_string($xml_string)) { + return true; + } + } catch (\Exception $ex) { + } + + return false; + } + /** * Check if the given value is a proper file that can be opened with fopen(). */ @@ -90,9 +126,9 @@ public function validateFile($attribute, $value, $parameters) { try { $ssl_options = array( - "ssl"=>array( - "verify_peer"=>false, - "verify_peer_name"=>false, + 'ssl' => array( + 'verify_peer' => false, + 'verify_peer_name' => false, ), ); @@ -111,12 +147,12 @@ public function validateJson($attribute, $value, $parameters) try { $data = []; - if (!filter_var($value, FILTER_VALIDATE_URL) === false) { + if (! filter_var($value, FILTER_VALIDATE_URL) === false) { $ch = curl_init(); $data = $this->getRemoteData($value); curl_close($ch); } else { - $data =@ file_get_contents($value); + $data = @ file_get_contents($value); } if (empty($data)) { @@ -135,7 +171,7 @@ public function validateJson($attribute, $value, $parameters) public function validateInstalled($attribute, $value, $parameters) { try { - $class_file = app_path() . '/../installed/' . $value; + $class_file = app_path() . '/../installed/' . 
$value; return file_exists($class_file); diff --git a/app/database/migrations/2017_02_01_162155_definitions_version_control.php b/app/database/migrations/2017_02_01_162155_definitions_version_control.php index e3356341..32169b99 100644 --- a/app/database/migrations/2017_02_01_162155_definitions_version_control.php +++ b/app/database/migrations/2017_02_01_162155_definitions_version_control.php @@ -15,7 +15,7 @@ public function up() Schema::table('definitions', function ($table) { $table->integer('user_id')->unsigned(); $table->string('username', 255); - $table->integer('draft_flag')->unsigned()->nullable(); + $table->integer('draft_flag')->unsigned()->nullable(); }); Schema::create('definitions_updates', function ($table) { diff --git a/app/database/migrations/2017_02_22_111625_xslt_file.php b/app/database/migrations/2017_02_22_111625_xslt_file.php new file mode 100644 index 00000000..6b60b37e --- /dev/null +++ b/app/database/migrations/2017_02_22_111625_xslt_file.php @@ -0,0 +1,30 @@ +string('xslt_file', 255)->nullable(); + }); + } + + /** + * Reverse the migrations. + * + * @return void + */ + public function down() + { + Schema::table('definitions', function ($table) { + $table->dropColumn('xslt_file'); + }); + } +} diff --git a/app/database/migrations/2017_02_22_164900_create_link_definitions_table.php b/app/database/migrations/2017_02_22_164900_create_link_definitions_table.php new file mode 100644 index 00000000..6bf55163 --- /dev/null +++ b/app/database/migrations/2017_02_22_164900_create_link_definitions_table.php @@ -0,0 +1,36 @@ +increments('id'); + $table->integer('linked_to')->unsigned()->index(); + $table->integer('linked_from')->unsigned()->index(); + $table->foreign('linked_to')->references('id')->on('definitions')->onDelete('cascade'); + $table->foreign('linked_from')->references('id')->on('definitions')->onDelete('cascade'); + + $table->string('description', 255)->nullable(); + }); + } + + /** + * Reverse the migrations. + * + * @return void + */ + public function down() + { + Schema::drop('linked_definitions'); + } + +} diff --git a/app/database/migrations/2017_03_04_183929_change_column_name_mysql_resource.php b/app/database/migrations/2017_03_04_183929_change_column_name_mysql_resource.php new file mode 100644 index 00000000..c1b924ae --- /dev/null +++ b/app/database/migrations/2017_03_04_183929_change_column_name_mysql_resource.php @@ -0,0 +1,36 @@ +renameColumn('host', 'mysql_host'); + $table->renameColumn('password', 'mysql_password'); + $table->renameColumn('port', 'mysql_port'); + $table->renameColumn('username', 'mysql_username'); + }); + } + + /** + * Reverse the migrations. 
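+     * Renames the mysql_-prefixed columns back to their original names.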
+ * + * @return void + */ + public function down() + { + Schema::table('mysqldefinitions', function ($table) { + $table->renameColumn('mysql_host', 'host'); + $table->renameColumn('mysql_password', 'password'); + $table->renameColumn('mysql_port', 'port'); + $table->renameColumn('mysql_username', 'username'); + }); + } +} diff --git a/app/lang/en/admin.php b/app/lang/en/admin.php index d7e20990..b53b656e 100755 --- a/app/lang/en/admin.php +++ b/app/lang/en/admin.php @@ -68,5 +68,14 @@ "menu_users" => "Users", "menu_groups" => "Groups", "created_by" => "Created by", - "updated_by" => "Updated by" + "updated_by" => "Updated by", + "link_datasets" => "Link to other datasets (optional)", + "link_datasets_select_to" => "Search and select datasets to be linked to", + "current_linked_datasets" => "Currently linked from", + "update_linked_datasets" => "Update 'linked to' datasets", + "linked_datasets_type_to_search" => "Type to search and select a dataset...", + "linked_datasets_provide_context" => "Provide some context as to why this dataset is related...", + "add_link" => "Add link", + "delete_link" => "Delete link", + "linked_datasets_alert" => "Can't delete default dataset input textbox!" ); diff --git a/app/lang/en/htmlview.php b/app/lang/en/htmlview.php index 03d77325..b32df45b 100644 --- a/app/lang/en/htmlview.php +++ b/app/lang/en/htmlview.php @@ -1,15 +1,15 @@ "Formats", - "description" => "Description", - "source_type" => "Source type", - "license" => "License", - "contact" => "Contact", - "publisher" => "Publisher", - "keywords" => "Keywords", - "spatial" => "Geographic context", - "original_file" => "Original file", - "created_by" => "Created by", - "updated_by" => "Updated by" + 'formats' => 'Formats', + 'description' => 'Description', + 'source_type' => 'Source type', + 'license' => 'License', + 'contact' => 'Contact', + 'publisher' => 'Publisher', + 'keywords' => 'Keywords', + 'spatial' => 'Geographic context', + 'original_file' => 'Original file', + 'created_by' => 'Created by', + 'updated_by' => 'Last updates made by' ]; diff --git a/app/lang/en/parameters.php b/app/lang/en/parameters.php index 82da6ddf..deb24016 100755 --- a/app/lang/en/parameters.php +++ b/app/lang/en/parameters.php @@ -81,16 +81,16 @@ 'mongo_username_desc' => 'A username that has read permissions on the provided collection. Safety first, make sure the user only has read permissions.', 'mongo_password' => 'Password', 'mongo_password_desc' => 'The password for the user that has read permissions.', - 'mysql_host' => 'Host', - 'mysql_host_desc' => 'The host of the MySQL database.', - 'mysql_port' => 'Port', - 'mysql_port_desc' => 'The port of the MySQL database where a connection can be made to.', + 'mysql_mysql_host' => 'Host', + 'mysql_mysql_host_desc' => 'The host of the MySQL database.', + 'mysql_mysql_port' => 'Port', + 'mysql_mysql_port_desc' => 'The port of the MySQL database where a connection can be made to.', 'mysql_database' => 'Database', 'mysql_database_desc' => 'The name of the database where the datatable, that needs to be published, resides.', - 'mysql_username' => 'Username', - 'mysql_username_desc' => 'A username that has read permissions on the provided datatable. Safety first, make sure the user only has read permissions.', - 'mysql_password' => 'Password', - 'mysql_password_desc' => 'The password for the user that has read permissions.', + 'mysql_mysql_username' => 'Username', + 'mysql_mysql_username_desc' => 'A username that has read permissions on the provided datatable. 
Safety first, make sure the user only has read permissions.', + 'mysql_mysql_password' => 'Password', + 'mysql_mysql_password_desc' => 'The password for the user that has read permissions.', 'mysql_collation' => 'Collation', 'mysql_collation_desc' => 'The collation of the datatable.', 'mysql_pk' => 'Primary key', diff --git a/app/lang/en/validation.php b/app/lang/en/validation.php index 5a24a40c..17fc0de8 100644 --- a/app/lang/en/validation.php +++ b/app/lang/en/validation.php @@ -13,61 +13,62 @@ | */ - "accepted" => "The :attribute must be accepted.", - "active_url" => "The :attribute is not a valid URL.", - "after" => "The :attribute must be a date after :date.", - "alpha" => "The :attribute may only contain letters.", - "alpha_dash" => "The :attribute may only contain letters, numbers, and dashes.", - "alpha_num" => "The :attribute may only contain letters and numbers.", - "array" => "The :attribute must be an array.", - "before" => "The :attribute must be a date before :date.", - "between" => array( - "numeric" => "The :attribute must be between :min - :max.", - "file" => "The :attribute must be between :min - :max kilobytes.", - "string" => "The :attribute must be between :min - :max characters.", - "array" => "The :attribute must have between :min - :max items.", + 'accepted' => 'The :attribute must be accepted.', + 'active_url' => 'The :attribute is not a valid URL.', + 'after' => 'The :attribute must be a date after :date.', + 'alpha' => 'The :attribute may only contain letters.', + 'alpha_dash' => 'The :attribute may only contain letters, numbers, and dashes.', + 'alpha_num' => 'The :attribute may only contain letters and numbers.', + 'array' => 'The :attribute must be an array.', + 'before' => 'The :attribute must be a date before :date.', + 'between' => array( + 'numeric' => 'The :attribute must be between :min - :max.', + 'file' => 'The :attribute must be between :min - :max kilobytes.', + 'string' => 'The :attribute must be between :min - :max characters.', + 'array' => 'The :attribute must have between :min - :max items.', ), - "confirmed" => "The :attribute confirmation does not match.", - "date" => "The :attribute is not a valid date.", - "date_format" => "The :attribute does not match the format :format.", - "different" => "The :attribute and :other must be different.", - "digits" => "The :attribute must be :digits digits.", - "digits_between" => "The :attribute must be between :min and :max digits.", - "email" => "The :attribute format is invalid.", - "exists" => "The selected :attribute is invalid.", - "image" => "The :attribute must be an image.", - "in" => "The selected :attribute is invalid.", - "integer" => "The :attribute must be an integer.", - "ip" => "The :attribute must be a valid IP address.", - "max" => array( - "numeric" => "The :attribute may not be greater than :max.", - "file" => "The :attribute may not be greater than :max kilobytes.", - "string" => "The :attribute may not be greater than :max characters.", - "array" => "The :attribute may not have more than :max items.", + 'confirmed' => 'The :attribute confirmation does not match.', + 'date' => 'The :attribute is not a valid date.', + 'date_format' => 'The :attribute does not match the format :format.', + 'different' => 'The :attribute and :other must be different.', + 'digits' => 'The :attribute must be :digits digits.', + 'digits_between' => 'The :attribute must be between :min and :max digits.', + 'email' => 'The :attribute format is invalid.', + 'exists' => 'The selected :attribute is invalid.', + 
'image' => 'The :attribute must be an image.', + 'in' => 'The selected :attribute is invalid.', + 'integer' => 'The :attribute must be an integer.', + 'ip' => 'The :attribute must be a valid IP address.', + 'max' => array( + 'numeric' => 'The :attribute may not be greater than :max.', + 'file' => 'The :attribute may not be greater than :max kilobytes.', + 'string' => 'The :attribute may not be greater than :max characters.', + 'array' => 'The :attribute may not have more than :max items.', ), - "mimes" => "The :attribute must be a file of type: :values.", - "min" => array( - "numeric" => "The :attribute must be at least :min.", - "file" => "The :attribute must be at least :min kilobytes.", - "string" => "The :attribute must be at least :min characters.", - "array" => "The :attribute must have at least :min items.", + 'mimes' => 'The :attribute must be a file of type: :values.', + 'min' => array( + 'numeric' => 'The :attribute must be at least :min.', + 'file' => 'The :attribute must be at least :min kilobytes.', + 'string' => 'The :attribute must be at least :min characters.', + 'array' => 'The :attribute must have at least :min items.', ), - "not_in" => "The selected :attribute is invalid.", - "numeric" => "The :attribute must be a number.", - "regex" => "The :attribute format is invalid.", - "required" => "The :attribute field is required.", - "required_if" => "The :attribute field is required when :other is :value.", - "required_with" => "The :attribute field is required when :values is present.", - "required_without" => "The :attribute field is required when :values is not present.", - "same" => "The :attribute and :other must match.", - "size" => array( - "numeric" => "The :attribute must be :size.", - "file" => "The :attribute must be :size kilobytes.", - "string" => "The :attribute must be :size characters.", - "array" => "The :attribute must contain :size items.", + 'not_in' => 'The selected :attribute is invalid.', + 'numeric' => 'The :attribute must be a number.', + 'regex' => 'The :attribute format is invalid.', + 'required' => 'The :attribute field is required.', + 'required_if' => 'The :attribute field is required when :other is :value.', + 'required_with' => 'The :attribute field is required when :values is present.', + 'required_without' => 'The :attribute field is required when :values is not present.', + 'same' => 'The :attribute and :other must match.', + 'size' => array( + 'numeric' => 'The :attribute must be :size.', + 'file' => 'The :attribute must be :size kilobytes.', + 'string' => 'The :attribute must be :size characters.', + 'array' => 'The :attribute must contain :size items.', ), - "unique" => "The :attribute has already been taken.", - "url" => "The :attribute format is invalid.", + 'unique' => 'The :attribute has already been taken.', + 'url' => 'The :attribute format is invalid.', + 'xml' => 'The XML is not valid', /* |-------------------------------------------------------------------------- diff --git a/app/lang/fr/admin.php b/app/lang/fr/admin.php index 27a95e29..b6d21aee 100644 --- a/app/lang/fr/admin.php +++ b/app/lang/fr/admin.php @@ -62,5 +62,14 @@ "menu_groups" => "Groupes", "geodcat_header" => "Localisez la zone concernée", "created_by" => "Créé par", - "updated_by" => "Mise à jour" + "updated_by" => "Mise à jour", + "link_datasets" => "Link to other datasets (optional)", + "link_datasets_select_to" => "Search and select datasets to be linked to", + "current_linked_datasets" => "Currently linked from", + "update_linked_datasets" => "Update 'linked to' 
datasets", + "linked_datasets_type_to_search" => "Type to search and select a dataset...", + "linked_datasets_provide_context" => "Provide some context as to why this dataset is related...", + "add_link" => "Add link", + "delete_link" => "Delete link", + "linked_datasets_alert" => "Can't delete default dataset input textbox!" ); diff --git a/app/lang/fr/parameters.php b/app/lang/fr/parameters.php index b0139089..be4a881a 100755 --- a/app/lang/fr/parameters.php +++ b/app/lang/fr/parameters.php @@ -77,16 +77,16 @@ 'mongo_username_desc' => "Un nom d'utilisateur qui a lu des autorisations sur la collection fournie. Sécurité d'abord, assurez-vous que l'utilisateur a lu des autorisations uniquement.", 'mongo_password' => 'Mot de passe', 'mongo_password_desc' => "Le mot de passe pour l'utilisateur qui a les autorisations de lecture.", - 'mysql_host' => 'Hôte', - 'mysql_host_desc' => "L'hôte de la base de données MySQL.", - 'mysql_port' => 'Port', - 'mysql_port_desc' => 'Le port de la base de données MySQL où une connexion peut être mis en place.', + 'mysql_mysql_host' => 'Hôte', + 'mysql_mysql_host_desc' => "L'hôte de la base de données MySQL.", + 'mysql_mysql_port' => 'Port', + 'mysql_mysql_port_desc' => 'Le port de la base de données MySQL où une connexion peut être mis en place.', 'mysql_database' => 'Base de données', 'mysql_database_desc' => 'Le nom de la base de données où la datatable, qui doit être publié, réside.', - 'mysql_username' => "Nom d'utilisateur", - 'mysql_username_desc' => "Un nom d'utilisateur qui a lu des autorisations sur la collection fournie. Sécurité d'abord, assurez-vous que l'utilisateur a lu des autorisations uniquement.", - 'mysql_password' => 'Mot de passe', - 'mysql_password_desc' => "Le mot de passe pour l'utilisateur qui a les autorisations de lecture.", + 'mysql_mysql_username' => "Nom d'utilisateur", + 'mysql_mysql_username_desc' => "Un nom d'utilisateur qui a lu des autorisations sur la collection fournie. Sécurité d'abord, assurez-vous que l'utilisateur a lu des autorisations uniquement.", + 'mysql_mysql_password' => 'Mot de passe', + 'mysql_mysql_password_desc' => "Le mot de passe pour l'utilisateur qui a les autorisations de lecture.", 'mysql_collation' => 'Collation', 'mysql_collation_desc' => 'La classement de la datatable', 'mysql_pk' => 'Clé primaire', @@ -127,7 +127,7 @@ 'sparql_query_desc' => 'La requête à être exécutée.', 'sparql_endpoint_user' => "Nom d'utilisateur du SPARQL endpoint", 'sparql_endpoint_user_desc' => "Un nom d'utilisateur qui a lu des autorisations sur la collection fournie. 
Sécurité d'abord, assurez-vous que l'utilisateur a lu des autorisations uniquement.", - 'sparql_endpoint_password' => "Mot de passe", + 'sparql_endpoint_password' => 'Mot de passe', 'sparql_endpoint_password_desc' => "Le mot de passe pour l'utilisateur qui a les autorisations de lecture.", 'xls_uri' => 'URI', 'xls_uri_desc' => "L'emplacement du fichier XLS, une URL ou un emplacement de fichier local.", @@ -163,24 +163,24 @@ 'installed_title_desc' => 'Un nom donné à la ressource.', 'installed_description' => 'Description', 'installed_description_desc' => "Le texte descriptif ou d'information qui fournit un certain contexte pour vous publié des ensembles de données.", - 'elasticsearch_description' => "Description", + 'elasticsearch_description' => 'Description', 'elasticsearch_description_desc' => "Le texte descriptif ou d'information qui fournit un certain contexte pour vous publié des ensembles de données.", - 'elasticsearch_host' => "Hôte", + 'elasticsearch_host' => 'Hôte', 'elasticsearch_host_desc' => "L'hôte d'Elasticsearch.", - 'elasticsearch_es_type' => "Type", + 'elasticsearch_es_type' => 'Type', 'elasticsearch_es_type_desc' => "Le type de données à l'intérieur de l'index.", - 'elasticsearch_es_index' => "Index", + 'elasticsearch_es_index' => 'Index', 'elasticsearch_es_index_desc' => "Le nom de l'index Elasticsearch.", - 'elasticsearch_port' => "Port", - 'elasticsearch_port_desc' => "Le port de la Elasticsearch.", + 'elasticsearch_port' => 'Port', + 'elasticsearch_port_desc' => 'Le port de la Elasticsearch.', 'elasticsearch_username' => "Nom d'utilisateur", 'elasticsearch_username_desc' => "Un nom d'utilisateur qui a lu des autorisations sur la collection fournie. Sécurité d'abord, assurez-vous que l'utilisateur a lu des autorisations uniquement.", - 'elasticsearch_password' => "Mot de passe", + 'elasticsearch_password' => 'Mot de passe', 'elasticsearch_password_desc' => "Le mot de passe pour l'utilisateur qui a les autorisations de lecture.", - + 'csv_job_id' => 'Related Job', 'csv_job_id_desc' => 'Job linked to this dataset.', 'csv_original_file' => 'Original file', - 'csv_original_file_desc' => 'Original dataset file.', + 'csv_original_file_desc' => 'Original dataset file.', ); diff --git a/app/lang/nl/admin.php b/app/lang/nl/admin.php index f7ab0633..ad70db17 100644 --- a/app/lang/nl/admin.php +++ b/app/lang/nl/admin.php @@ -62,5 +62,14 @@ "menu_groups" => "Groepen", "geodcat_header" => "Selecteer de relevante zone", "created_by" => "Gemaakt door", - "updated_by" => "Geactualiseerd door" + "updated_by" => "Geactualiseerd door", + "link_datasets" => "Link to other datasets (optional)", + "link_datasets_select_to" => "Search and select datasets to be linked to", + "current_linked_datasets" => "Currently linked from", + "update_linked_datasets" => "Update 'linked to' datasets", + "linked_datasets_type_to_search" => "Type to search and select a dataset...", + "linked_datasets_provide_context" => "Provide some context as to why this dataset is related...", + "add_link" => "Add link", + "delete_link" => "Delete link", + "linked_datasets_alert" => "Can't delete default dataset input textbox!" 
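+    // NOTE: the "link_datasets" labels above are still in English; Dutch translations can follow later.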
); diff --git a/app/lang/nl/htmlview.php b/app/lang/nl/htmlview.php index c6067cfa..0a135bac 100644 --- a/app/lang/nl/htmlview.php +++ b/app/lang/nl/htmlview.php @@ -1,15 +1,15 @@ "Formaten", - "description" => "Beschrijving", - "source_type" => "Bronbestand", - "license" => "Licentie", - "contact" => "Contact", - "publisher" => "Uitgever", - "keywords" => "Trefwoorden", - "spatial" => "Geografische context", - "original_file" => "Origineel bestand", - "created_by" => "Gemaakt door", - "updated_by" => "Geactualiseerd door" + 'formats' => 'Formaten', + 'description' => 'Beschrijving', + 'source_type' => 'Bronbestand', + 'license' => 'Licentie', + 'contact' => 'Contact', + 'publisher' => 'Uitgever', + 'keywords' => 'Trefwoorden', + 'spatial' => 'Geografische context', + 'original_file' => 'Origineel bestand', + 'created_by' => 'Gemaakt door', + 'updated_by' => 'Laatst aangepast door' ]; diff --git a/app/lang/nl/parameters.php b/app/lang/nl/parameters.php index c6583a65..04ac307a 100755 --- a/app/lang/nl/parameters.php +++ b/app/lang/nl/parameters.php @@ -75,16 +75,16 @@ 'mongo_username_desc' => 'Een gebruikersnaam die leesrechten heeft op de collectie.', 'mongo_password' => 'Wachtwoord', 'mongo_password_desc' => 'Het wachtwoord van de gebruiker.', - 'mysql_host' => 'Host', - 'mysql_host_desc' => 'De host van de MySQL database.', - 'mysql_port' => 'Poort', - 'mysql_port_desc' => 'De poort van de MySQL database waar een verbinding mee kan gemaakt worden.', + 'mysql_mysql_host' => 'Host', + 'mysql_mysql_host_desc' => 'De host van de MySQL database.', + 'mysql_mysql_port' => 'Poort', + 'mysql_mysql_port_desc' => 'De poort van de MySQL database waar een verbinding mee kan gemaakt worden.', 'mysql_database' => 'Database', 'mysql_database_desc' => 'De naam van de database waar de tabel die gepubliceerd moet worden, in zit.', - 'mysql_username' => 'Gebruikersnaam', - 'mysql_username_desc' => 'Een gebruikersnaam die leesrechten heeft op de collectie.', - 'mysql_password' => 'Wachtwoord', - 'mysql_password_desc' => 'Het wachtwoord van de gebruiker.', + 'mysql_mysql_username' => 'Gebruikersnaam', + 'mysql_mysql_username_desc' => 'Een gebruikersnaam die leesrechten heeft op de collectie.', + 'mysql_mysql_password' => 'Wachtwoord', + 'mysql_mysql_password_desc' => 'Het wachtwoord van de gebruiker.', 'mysql_collation' => 'Collatie', 'mysql_collation_desc' => 'De collatie van de datatabel.', 'mysql_pk' => 'Unieke ID', @@ -123,7 +123,7 @@ 'sparql_query_desc' => 'De query waarvan de resultaten als open data worden gepubliceerd.', 'sparql_endpoint_user' => 'Gebruikersnaam', 'sparql_endpoint_user_desc' => 'De gebruiker die voldoende rechten heeft om het SPARQL endpoint te bevragen.', - 'sparql_endpoint_password' => "Wachtwoord", + 'sparql_endpoint_password' => 'Wachtwoord', 'sparql_endpoint_password_desc' => 'Wachtwoord van de gebruiker.', 'xls_uri' => 'URI', 'xls_uri_desc' => 'De locatie van het XLS file, dit kan een URI zijn of een lokaal pad.', @@ -171,9 +171,9 @@ 'elasticsearch_username_desc' => 'De gebruiker die voldoende rechten heeft om het SPARQL endpoint te bevragen.', 'elasticsearch_password' => 'Wachtwoord', 'elasticsearch_password_desc' => 'Wachtwoord van de gebruiker.', - + 'csv_job_id' => 'Related Job', 'csv_job_id_desc' => 'Job linked to this dataset.', 'csv_original_file' => 'Original file', - 'csv_original_file_desc' => 'Original dataset file.', + 'csv_original_file_desc' => 'Original dataset file.', ); diff --git a/app/models/Definition.php b/app/models/Definition.php index 06ee09ff..d13eacf6 
diff --git a/app/models/sourcetypes/MysqlDefinition.php b/app/models/sourcetypes/MysqlDefinition.php
index ee892c7c..c67b275f 100644
--- a/app/models/sourcetypes/MysqlDefinition.php
+++ b/app/models/sourcetypes/MysqlDefinition.php
@@ -12,7 +12,7 @@ class MysqlDefinition extends SourceType

     protected $table = 'mysqldefinitions';

-    protected $fillable = array('host', 'port', 'database', 'username', 'password', 'collation', 'query', 'description');
+    protected $fillable = array('mysql_host', 'mysql_port', 'database', 'mysql_username', 'mysql_password', 'collation', 'query', 'description');

     /**
      * Relationship with the TabularColumns model.
@@ -39,7 +39,6 @@ public function __get($name)
     {
         if ($name == 'pk') {
-
             // Retrieve the primary key from the columns
             // Get the related columns
             $columns = $this->tabularColumns()->getResults();
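With the renamed fillable list, MySQL definitions are mass-assigned through the mysql_-prefixed attribute names. A sketch of a create call under the new names, illustrative only; it assumes the matching columns exist in the mysqldefinitions table and all values are placeholders:

<?php

// Illustrative sketch only: mass assignment now expects the prefixed keys.
$definition = MysqlDefinition::create(array(
    'mysql_host'     => 'localhost',
    'mysql_port'     => '3306',
    'database'       => 'opendata',
    'mysql_username' => 'reader',
    'mysql_password' => 'secret',
    'collation'      => 'utf8_general_ci',
    'query'          => 'SELECT * FROM measurements',
    'description'    => 'Measurements published straight from MySQL',
));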
diff --git a/app/routes.php b/app/routes.php
index 26f22de8..2cfa7d84 100755
--- a/app/routes.php
+++ b/app/routes.php
@@ -52,14 +52,32 @@
         '/ /' => '_', // nonbreaking space (equiv. to 0x160)
     ];

-    $file = strtolower(preg_replace(array_keys($utf8), array_values($utf8), Input::file('fileupload')->getClientOriginalName()));
+    $file_xslt_upload = Input::file('fileupload_xslt');

-    return Input::file('fileupload')->move(
-        app_path() . '/storage/app/',
-        $file . '_' . time() . '.' . Input::file('fileupload')->getClientOriginalExtension()
-    );
+    if (isset($file_xslt_upload)) {
+        $file_xslt = strtolower(preg_replace(array_keys($utf8), array_values($utf8), Input::file('fileupload_xslt')->getClientOriginalName()));
+    }
+
+    if (isset($file_xslt)) {
+        Input::file('fileupload_xslt')->move(
+            app_path() . '/storage/app/',
+            $file_xslt . '_' . date('Y-m-d') . '.' . Input::file('fileupload_xslt')->getClientOriginalExtension()
+        );
+    }
+
+    if (! empty(Input::file('fileupload'))) {
+        $file = strtolower(preg_replace(array_keys($utf8), array_values($utf8), Input::file('fileupload')->getClientOriginalName()));
+
+        return Input::file('fileupload')->move(
+            app_path() . '/storage/app/',
+            $file . '_' . time() . '.' . Input::file('fileupload')->getClientOriginalExtension()
+        );
+    }
 });

+/* Autocomplete endpoint "Linking Datasets" */
+Route::get('/search/autocomplete', 'Tdt\\Core\\Ui\\DatasetController@autocompleteLinkedDatasets');
+
 /*
  * IMPORTANT!
  * The catch-all route to catch all other request is added last to allow packages to still have their own routes
@@ -81,7 +99,7 @@
 App::error(function ($exception, $code) {

     // Log error
-    Log::error($exception);
+    \Log::error($exception);

     // Check Accept-header
     $accept_header = \Request::header('Accept');
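The upload closure now accepts an optional XSLT file next to the data file, normalises both names with the same replacement map, and stamps the XSLT copy with the current date instead of a unix timestamp. A self-contained sketch of that naming behaviour, illustrative only; $utf8 below is a shortened stand-in for the map defined in routes.php and the file name is made up:

<?php

// Illustrative sketch only: the filename normalisation applied by the upload route.
$utf8 = array(
    '/[éèêë]/u' => 'e', // shortened stand-in for the accent map in routes.php
    '/ /'       => '_',
);

$original   = 'Télé Données.xslt';
$normalised = strtolower(preg_replace(array_keys($utf8), array_values($utf8), $original));

// XSLT uploads get a date suffix, data uploads keep the unix timestamp suffix.
echo $normalised . '_' . date('Y-m-d') . '.xslt' . PHP_EOL; // tele_donnees.xslt_<current date>.xslt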
[The markup of the three Blade view diffs below was partly lost when this document was extracted; only the hunk metadata, the surviving Blade directives and the translation calls are reproduced, with bracketed placeholders for lines whose content did not survive.]

diff --git a/app/views/dataset/partials/details.blade.php b/app/views/dataset/partials/details.blade.php
index 2ae63195..38e176f8 100755
--- a/app/views/dataset/partials/details.blade.php
+++ b/app/views/dataset/partials/details.blade.php
@@ -22,7 +22,7 @@
     @endforeach
-
+
     @if(Sentry::check())
         {{ trans('htmlview.created_by') }}
@@ -37,8 +37,8 @@
     @endforeach
     @endif
-    @endif
-
+    @endif
+
     @if(!empty($source_definition['description']))
         {{ trans('htmlview.description') }}
diff --git a/app/views/layouts/admin.blade.php b/app/views/layouts/admin.blade.php
index b8edcf91..9364236a 100644
--- a/app/views/layouts/admin.blade.php
+++ b/app/views/layouts/admin.blade.php
@@ -3,12 +3,14 @@
     {{ $title }}
+    [added line not recoverable]
+    [added line not recoverable]
diff --git a/app/views/ui/datasets/add.blade.php b/app/views/ui/datasets/add.blade.php
index 2b018e67..b7d63c21 100755
--- a/app/views/ui/datasets/add.blade.php
+++ b/app/views/ui/datasets/add.blade.php
@@ -1,7 +1,6 @@
 @extends('layouts.admin')

 @section('content')
-    [removed line not recoverable]
@@ -60,7 +59,7 @@
-    2. {{ trans('admin.select_type') }}
+    3. {{ trans('admin.select_type') }}