Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.
Merge branch 'master' into auto
Conflicts: .gitmodules
.gitmodules (vendored): 19 lines changed
@@ -82,9 +82,6 @@
 [submodule "vendor/grammars/language-python"]
 	path = vendor/grammars/language-python
 	url = https://github.com/atom/language-python
-[submodule "vendor/grammars/language-sass"]
-	path = vendor/grammars/language-sass
-	url = https://github.com/atom/language-sass
 [submodule "vendor/grammars/language-shellscript"]
 	path = vendor/grammars/language-shellscript
 	url = https://github.com/atom/language-shellscript
@@ -506,7 +503,6 @@
 [submodule "vendor/grammars/sublime-mask"]
 	path = vendor/grammars/sublime-mask
 	url = https://github.com/tenbits/sublime-mask
-	branch = release
 [submodule "vendor/grammars/sublime_cobol"]
 	path = vendor/grammars/sublime_cobol
 	url = https://bitbucket.org/bitlang/sublime_cobol
@@ -535,3 +531,18 @@
 [submodule "vendor/grammars/AutoHotkey"]
 	path = vendor/grammars/AutoHotkey
 	url = https://github.com/robertcollier4/AutoHotkey
+[submodule "vendor/grammars/Sublime-HTTP"]
+	path = vendor/grammars/Sublime-HTTP
+	url = https://github.com/httpspec/sublime-highlighting
+[submodule "vendor/grammars/sass-textmate-bundle"]
+	path = vendor/grammars/sass-textmate-bundle
+	url = https://github.com/nathos/sass-textmate-bundle
+[submodule "vendor/grammars/carto-atom"]
+	path = vendor/grammars/carto-atom
+	url = https://github.com/yohanboniface/carto-atom
+[submodule "vendor/grammars/Sublime-Nit"]
+	path = vendor/grammars/Sublime-Nit
+	url = https://github.com/R4PaSs/Sublime-Nit
+[submodule "vendor/grammars/language-hy"]
+	path = vendor/grammars/language-hy
+	url = https://github.com/rwtolbert/language-hy

.travis.yml: 12 lines changed
@@ -1,12 +1,5 @@
-before_install:
-  - git fetch origin master:master
-  - git fetch origin v2.0.0:v2.0.0
-  - git fetch origin test/attributes:test/attributes
-  - git fetch origin test/master:test/master
-  - sudo apt-get install libicu-dev -y
-  - git submodule init
-  - git submodule sync --quiet
-  - script/fast-submodule-update
+sudo: false
+before_install: script/travis/before_install
 rvm:
   - 1.9.3
   - 2.0.0
@@ -16,3 +9,4 @@ notifications:
   disabled: true
 git:
   submodules: false
+cache: bundler

@@ -23,7 +23,7 @@ Great! You'll need to:
 0. Add an entry for your language to [`languages.yml`][languages].
 0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution.
 0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
-0. Add your grammar to [`grammars.yml`][grammars] by running `script/download-grammars --add vendor/grammars/MyGrammar`.
+0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
 0. Add samples for your language to the [samples directory][samples].
 
 In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:
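The checklist above references a `languages.yml` entry without showing one. As a rough sketch only — the language name, color, and scope below are made up for illustration; the field names are the ones that appear in the `languages.yml` hunks later in this diff:

```yaml
# Hypothetical languages.yml entry, for illustration only — not part of this commit.
MyLanguage:
  type: programming        # one of: programming, data, markup, prose
  color: "#123456"         # color used in the repository language bar
  extensions:
  - .mylang                # file extensions attributed to this language
  tm_scope: source.mylang  # root scope of the grammar added under vendor/grammars
  ace_mode: text           # Ace editor mode used for highlighting
```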

Rakefile: 2 lines changed
@@ -48,7 +48,7 @@ end
 
 task :build_grammars_gem do
   rm_rf "grammars"
-  sh "script/download-grammars"
+  sh "script/convert-grammars"
   sh "gem", "build", "github-linguist-grammars.gemspec"
 end
 

grammars.yml: 20 lines changed
@@ -49,9 +49,9 @@ vendor/grammars/LiveScript.tmbundle:
 vendor/grammars/NSIS:
 - source.nsis
 vendor/grammars/NimLime:
-- source.nimrod
-- source.nimrod_filter
-- source.nimrodcfg
+- source.nim
+- source.nim_filter
+- source.nimcfg
 vendor/grammars/PHP-Twig.tmbundle:
 - text.html.twig
 vendor/grammars/RDoc.tmbundle:
@@ -68,6 +68,8 @@ vendor/grammars/Stata.tmbundle:
 - source.stata
 vendor/grammars/Sublime-Coq:
 - source.coq
+vendor/grammars/Sublime-HTTP:
+- source.httpspec
 vendor/grammars/Sublime-Inform:
 - source.Inform7
 vendor/grammars/Sublime-Lasso:
@@ -76,6 +78,8 @@ vendor/grammars/Sublime-Logos:
 - source.logos
 vendor/grammars/Sublime-Loom:
 - source.loomscript
+vendor/grammars/Sublime-Nit:
+- source.nit
 vendor/grammars/Sublime-QML:
 - source.qml
 vendor/grammars/Sublime-REBOL:
@@ -138,6 +142,8 @@ vendor/grammars/c.tmbundle:
 - source.c.platform
 vendor/grammars/capnproto.tmbundle:
 - source.capnp
+vendor/grammars/carto-atom:
+- source.css.mss
 vendor/grammars/ceylon-sublimetext:
 - module.ceylon
 - source.ceylon
@@ -255,16 +261,16 @@ vendor/grammars/language-csharp:
 - source.nant-build
 vendor/grammars/language-gfm:
 - source.gfm
+vendor/grammars/language-hy:
+- source.hy
 vendor/grammars/language-javascript:
 - source.js
 - source.js.regexp
 vendor/grammars/language-python:
 - source.python
 - source.regexp.python
+- text.python.console
 - text.python.traceback
-vendor/grammars/language-sass:
-- source.css.scss
-- source.sass
 vendor/grammars/language-shellscript:
 - source.shell
 - text.shell-session
@@ -376,6 +382,8 @@ vendor/grammars/ruby.tmbundle:
 vendor/grammars/sas.tmbundle:
 - source.SASLog
 - source.sas
+vendor/grammars/sass-textmate-bundle:
+- source.sass
 vendor/grammars/scala.tmbundle:
 - source.sbt
 - source.scala

@@ -175,7 +175,7 @@ module Linguist
     disambiguate "Frege", "Forth", "Text" do |data|
       if /^(: |also |new-device|previous )/.match(data)
         Language["Forth"]
-      elsif /\s*(import|module|package|data|type) /.match(data)
+      elsif /^\s*(import|module|package|data|type) /.match(data)
         Language["Frege"]
       else
         Language["Text"]

@@ -450,12 +450,13 @@ Cap'n Proto:
   ace_mode: text
 
 CartoCSS:
+  type: programming
   aliases:
   - Carto
   extensions:
   - .mss
   ace_mode: text
-  tm_scope: none
+  tm_scope: source.css.mss
 
 Ceylon:
   type: programming
@@ -1211,7 +1212,7 @@ HTTP:
   type: data
   extensions:
   - .http
-  tm_scope: none
+  tm_scope: source.httpspec
   ace_mode: text
 
 Hack:
@@ -1267,13 +1268,13 @@ Haxe:
 
 Hy:
   type: programming
-  ace_mode: clojure
+  ace_mode: text
   color: "#7891b1"
   extensions:
   - .hy
   aliases:
   - hylang
-  tm_scope: none
+  tm_scope: source.hy
 
 IDL:
   type: programming
@@ -1381,13 +1382,6 @@ JSON:
   extensions:
   - .json
   - .lock
-  - .sublime-keymap
-  - .sublime-mousemap
-  - .sublime-project
-  - .sublime-settings
-  - .sublime-workspace
-  - .sublime_metrics
-  - .sublime_session
   filenames:
   - .jshintrc
   - composer.lock
@@ -1471,6 +1465,19 @@ JavaScript:
   - .pac
   - .sjs
   - .ssjs
+  - .sublime-build
+  - .sublime-commands
+  - .sublime-completions
+  - .sublime-keymap
+  - .sublime-macro
+  - .sublime-menu
+  - .sublime-mousemap
+  - .sublime-project
+  - .sublime-settings
+  - .sublime-theme
+  - .sublime-workspace
+  - .sublime_metrics
+  - .sublime_session
   - .xsjs
   - .xsjslib
   filenames:
@@ -1891,6 +1898,7 @@ Nimrod:
   - .nim
   - .nimrod
   ace_mode: text
+  tm_scope: source.nim
 
 Ninja:
   type: data
@@ -1904,7 +1912,7 @@ Nit:
   color: "#0d8921"
   extensions:
   - .nit
-  tm_scope: none
+  tm_scope: source.nit
   ace_mode: text
 
 Nix:
@@ -2620,6 +2628,8 @@ SQL:
   ace_mode: sql
   extensions:
   - .sql
+  - .cql
+  - .ddl
   - .prc
   - .tab
   - .udf
@@ -3129,6 +3139,8 @@ XML:
   - .rss
   - .scxml
   - .srdf
+  - .stTheme
+  - .sublime-snippet
   - .svg
   - .targets
   - .tmCommand

@@ -1,3 +1,3 @@
 module Linguist
-  VERSION = "4.2.5"
+  VERSION = "4.2.6"
 end

samples/Nit/file.nit: 798 lines added (new file)
@@ -0,0 +1,798 @@
|
|||||||
|
# This file is part of NIT ( http://www.nitlanguage.org ).
|
||||||
|
#
|
||||||
|
# Copyright 2004-2008 Jean Privat <jean@pryen.org>
|
||||||
|
# Copyright 2008 Floréal Morandat <morandat@lirmm.fr>
|
||||||
|
# Copyright 2008 Jean-Sébastien Gélinas <calestar@gmail.com>
|
||||||
|
#
|
||||||
|
# This file is free software, which comes along with NIT. This software is
|
||||||
|
# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
|
||||||
|
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||||
|
# PARTICULAR PURPOSE. You can modify it is you want, provided this header
|
||||||
|
# is kept unaltered, and a notification of the changes is added.
|
||||||
|
# You are allowed to redistribute it and sell it, alone or is a part of
|
||||||
|
# another product.
|
||||||
|
|
||||||
|
# File manipulations (create, read, write, etc.)
|
||||||
|
module file
|
||||||
|
|
||||||
|
intrude import stream
|
||||||
|
intrude import ropes
|
||||||
|
import string_search
|
||||||
|
import time
|
||||||
|
|
||||||
|
in "C Header" `{
|
||||||
|
#include <dirent.h>
|
||||||
|
#include <string.h>
|
||||||
|
#include <sys/types.h>
|
||||||
|
#include <sys/stat.h>
|
||||||
|
#include <unistd.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <poll.h>
|
||||||
|
#include <errno.h>
|
||||||
|
`}
|
||||||
|
|
||||||
|
# File Abstract Stream
|
||||||
|
abstract class FStream
|
||||||
|
super IOS
|
||||||
|
# The path of the file.
|
||||||
|
var path: nullable String = null
|
||||||
|
|
||||||
|
# The FILE *.
|
||||||
|
private var file: nullable NativeFile = null
|
||||||
|
|
||||||
|
fun file_stat: FileStat do return _file.file_stat
|
||||||
|
|
||||||
|
# File descriptor of this file
|
||||||
|
fun fd: Int do return _file.fileno
|
||||||
|
end
|
||||||
|
|
||||||
|
# File input stream
|
||||||
|
class IFStream
|
||||||
|
super FStream
|
||||||
|
super BufferedIStream
|
||||||
|
super PollableIStream
|
||||||
|
# Misc
|
||||||
|
|
||||||
|
# Open the same file again.
|
||||||
|
# The original path is reused, therefore the reopened file can be a different file.
|
||||||
|
fun reopen
|
||||||
|
do
|
||||||
|
if not eof and not _file.address_is_null then close
|
||||||
|
_file = new NativeFile.io_open_read(path.to_cstring)
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Error: Opening file at '{path.as(not null)}' failed with '{sys.errno.strerror}'")
|
||||||
|
end_reached = true
|
||||||
|
return
|
||||||
|
end
|
||||||
|
end_reached = false
|
||||||
|
_buffer_pos = 0
|
||||||
|
_buffer.clear
|
||||||
|
end
|
||||||
|
|
||||||
|
redef fun close
|
||||||
|
do
|
||||||
|
if _file.address_is_null then return
|
||||||
|
var i = _file.io_close
|
||||||
|
_buffer.clear
|
||||||
|
end_reached = true
|
||||||
|
end
|
||||||
|
|
||||||
|
redef fun fill_buffer
|
||||||
|
do
|
||||||
|
var nb = _file.io_read(_buffer.items, _buffer.capacity)
|
||||||
|
if nb <= 0 then
|
||||||
|
end_reached = true
|
||||||
|
nb = 0
|
||||||
|
end
|
||||||
|
_buffer.length = nb
|
||||||
|
_buffer_pos = 0
|
||||||
|
end
|
||||||
|
# End of file?
|
||||||
|
redef var end_reached: Bool = false
|
||||||
|
|
||||||
|
# Open the file at `path` for reading.
|
||||||
|
init open(path: String)
|
||||||
|
do
|
||||||
|
self.path = path
|
||||||
|
prepare_buffer(10)
|
||||||
|
_file = new NativeFile.io_open_read(path.to_cstring)
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'")
|
||||||
|
end_reached = true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
init from_fd(fd: Int) do
|
||||||
|
self.path = ""
|
||||||
|
prepare_buffer(10)
|
||||||
|
_file = fd_to_stream(fd, read_only)
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Error: Converting fd {fd} to stream failed with '{sys.errno.strerror}'")
|
||||||
|
end_reached = true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# File output stream
|
||||||
|
class OFStream
|
||||||
|
super FStream
|
||||||
|
super OStream
|
||||||
|
|
||||||
|
redef fun write(s)
|
||||||
|
do
|
||||||
|
if last_error != null then return
|
||||||
|
if not _is_writable then
|
||||||
|
last_error = new IOError("Cannot write to non-writable stream")
|
||||||
|
return
|
||||||
|
end
|
||||||
|
if s isa FlatText then
|
||||||
|
write_native(s.to_cstring, s.length)
|
||||||
|
else
|
||||||
|
for i in s.substrings do write_native(i.to_cstring, i.length)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
redef fun close
|
||||||
|
do
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Cannot close non-existing write stream")
|
||||||
|
_is_writable = false
|
||||||
|
return
|
||||||
|
end
|
||||||
|
var i = _file.io_close
|
||||||
|
_is_writable = false
|
||||||
|
end
|
||||||
|
redef var is_writable = false
|
||||||
|
|
||||||
|
# Write `len` bytes from `native`.
|
||||||
|
private fun write_native(native: NativeString, len: Int)
|
||||||
|
do
|
||||||
|
if last_error != null then return
|
||||||
|
if not _is_writable then
|
||||||
|
last_error = new IOError("Cannot write to non-writable stream")
|
||||||
|
return
|
||||||
|
end
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Writing on a null stream")
|
||||||
|
_is_writable = false
|
||||||
|
return
|
||||||
|
end
|
||||||
|
var err = _file.io_write(native, len)
|
||||||
|
if err != len then
|
||||||
|
# Big problem
|
||||||
|
last_error = new IOError("Problem in writing : {err} {len} \n")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Open the file at `path` for writing.
|
||||||
|
init open(path: String)
|
||||||
|
do
|
||||||
|
_file = new NativeFile.io_open_write(path.to_cstring)
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'")
|
||||||
|
self.path = path
|
||||||
|
is_writable = false
|
||||||
|
end
|
||||||
|
self.path = path
|
||||||
|
_is_writable = true
|
||||||
|
end
|
||||||
|
|
||||||
|
# Creates a new File stream from a file descriptor
|
||||||
|
init from_fd(fd: Int) do
|
||||||
|
self.path = ""
|
||||||
|
_file = fd_to_stream(fd, wipe_write)
|
||||||
|
_is_writable = true
|
||||||
|
if _file.address_is_null then
|
||||||
|
last_error = new IOError("Error: Opening stream from file descriptor {fd} failed with '{sys.errno.strerror}'")
|
||||||
|
_is_writable = false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
redef interface Object
|
||||||
|
|
||||||
|
private fun read_only: NativeString do return "r".to_cstring
|
||||||
|
|
||||||
|
private fun wipe_write: NativeString do return "w".to_cstring
|
||||||
|
|
||||||
|
private fun fd_to_stream(fd: Int, mode: NativeString): NativeFile `{
|
||||||
|
return fdopen(fd, mode);
|
||||||
|
`}
|
||||||
|
|
||||||
|
# returns first available stream to read or write to
|
||||||
|
# return null on interruption (possibly a signal)
|
||||||
|
protected fun poll( streams : Sequence[FStream] ) : nullable FStream
|
||||||
|
do
|
||||||
|
var in_fds = new Array[Int]
|
||||||
|
var out_fds = new Array[Int]
|
||||||
|
var fd_to_stream = new HashMap[Int,FStream]
|
||||||
|
for s in streams do
|
||||||
|
var fd = s.fd
|
||||||
|
if s isa IFStream then in_fds.add( fd )
|
||||||
|
if s isa OFStream then out_fds.add( fd )
|
||||||
|
|
||||||
|
fd_to_stream[fd] = s
|
||||||
|
end
|
||||||
|
|
||||||
|
var polled_fd = intern_poll( in_fds, out_fds )
|
||||||
|
|
||||||
|
if polled_fd == null then
|
||||||
|
return null
|
||||||
|
else
|
||||||
|
return fd_to_stream[polled_fd]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
private fun intern_poll(in_fds: Array[Int], out_fds: Array[Int]) : nullable Int is extern import Array[Int].length, Array[Int].[], Int.as(nullable Int) `{
|
||||||
|
int in_len, out_len, total_len;
|
||||||
|
struct pollfd *c_fds;
|
||||||
|
sigset_t sigmask;
|
||||||
|
int i;
|
||||||
|
int first_polled_fd = -1;
|
||||||
|
int result;
|
||||||
|
|
||||||
|
in_len = Array_of_Int_length( in_fds );
|
||||||
|
out_len = Array_of_Int_length( out_fds );
|
||||||
|
total_len = in_len + out_len;
|
||||||
|
c_fds = malloc( sizeof(struct pollfd) * total_len );
|
||||||
|
|
||||||
|
/* input streams */
|
||||||
|
for ( i=0; i<in_len; i ++ ) {
|
||||||
|
int fd;
|
||||||
|
fd = Array_of_Int__index( in_fds, i );
|
||||||
|
|
||||||
|
c_fds[i].fd = fd;
|
||||||
|
c_fds[i].events = POLLIN;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* output streams */
|
||||||
|
for ( i=0; i<out_len; i ++ ) {
|
||||||
|
int fd;
|
||||||
|
fd = Array_of_Int__index( out_fds, i );
|
||||||
|
|
||||||
|
c_fds[i].fd = fd;
|
||||||
|
c_fds[i].events = POLLOUT;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* poll all fds, unlimited timeout */
|
||||||
|
result = poll( c_fds, total_len, -1 );
|
||||||
|
|
||||||
|
if ( result > 0 ) {
|
||||||
|
/* analyse results */
|
||||||
|
for ( i=0; i<total_len; i++ )
|
||||||
|
if ( c_fds[i].revents & c_fds[i].events || /* awaited event */
|
||||||
|
c_fds[i].revents & POLLHUP ) /* closed */
|
||||||
|
{
|
||||||
|
first_polled_fd = c_fds[i].fd;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return Int_as_nullable( first_polled_fd );
|
||||||
|
}
|
||||||
|
else if ( result < 0 )
|
||||||
|
fprintf( stderr, "Error in Stream:poll: %s\n", strerror( errno ) );
|
||||||
|
|
||||||
|
return null_Int();
|
||||||
|
`}
|
||||||
|
end
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
class Stdin
|
||||||
|
super IFStream
|
||||||
|
|
||||||
|
init do
|
||||||
|
_file = new NativeFile.native_stdin
|
||||||
|
path = "/dev/stdin"
|
||||||
|
prepare_buffer(1)
|
||||||
|
end
|
||||||
|
|
||||||
|
redef fun poll_in: Bool is extern "file_stdin_poll_in"
|
||||||
|
end
|
||||||
|
|
||||||
|
class Stdout
|
||||||
|
super OFStream
|
||||||
|
init do
|
||||||
|
_file = new NativeFile.native_stdout
|
||||||
|
path = "/dev/stdout"
|
||||||
|
_is_writable = true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class Stderr
|
||||||
|
super OFStream
|
||||||
|
init do
|
||||||
|
_file = new NativeFile.native_stderr
|
||||||
|
path = "/dev/stderr"
|
||||||
|
_is_writable = true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
redef class Streamable
|
||||||
|
# Like `write_to` but take care of creating the file
|
||||||
|
fun write_to_file(filepath: String)
|
||||||
|
do
|
||||||
|
var stream = new OFStream.open(filepath)
|
||||||
|
write_to(stream)
|
||||||
|
stream.close
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
redef class String
|
||||||
|
# return true if a file with this names exists
|
||||||
|
fun file_exists: Bool do return to_cstring.file_exists
|
||||||
|
|
||||||
|
# The status of a file. see POSIX stat(2).
|
||||||
|
fun file_stat: FileStat do return to_cstring.file_stat
|
||||||
|
|
||||||
|
# The status of a file or of a symlink. see POSIX lstat(2).
|
||||||
|
fun file_lstat: FileStat do return to_cstring.file_lstat
|
||||||
|
|
||||||
|
# Remove a file, return true if success
|
||||||
|
fun file_delete: Bool do return to_cstring.file_delete
|
||||||
|
|
||||||
|
# Copy content of file at `self` to `dest`
|
||||||
|
fun file_copy_to(dest: String)
|
||||||
|
do
|
||||||
|
var input = new IFStream.open(self)
|
||||||
|
var output = new OFStream.open(dest)
|
||||||
|
|
||||||
|
while not input.eof do
|
||||||
|
var buffer = input.read(1024)
|
||||||
|
output.write buffer
|
||||||
|
end
|
||||||
|
|
||||||
|
input.close
|
||||||
|
output.close
|
||||||
|
end
|
||||||
|
|
||||||
|
# Remove the trailing extension `ext`.
|
||||||
|
#
|
||||||
|
# `ext` usually starts with a dot but could be anything.
|
||||||
|
#
|
||||||
|
# assert "file.txt".strip_extension(".txt") == "file"
|
||||||
|
# assert "file.txt".strip_extension("le.txt") == "fi"
|
||||||
|
# assert "file.txt".strip_extension("xt") == "file.t"
|
||||||
|
#
|
||||||
|
# if `ext` is not present, `self` is returned unmodified.
|
||||||
|
#
|
||||||
|
# assert "file.txt".strip_extension(".tar.gz") == "file.txt"
|
||||||
|
fun strip_extension(ext: String): String
|
||||||
|
do
|
||||||
|
if has_suffix(ext) then
|
||||||
|
return substring(0, length - ext.length)
|
||||||
|
end
|
||||||
|
return self
|
||||||
|
end
|
||||||
|
|
||||||
|
# Extract the basename of a path and remove the extension
|
||||||
|
#
|
||||||
|
# assert "/path/to/a_file.ext".basename(".ext") == "a_file"
|
||||||
|
# assert "path/to/a_file.ext".basename(".ext") == "a_file"
|
||||||
|
# assert "path/to".basename(".ext") == "to"
|
||||||
|
# assert "path/to/".basename(".ext") == "to"
|
||||||
|
# assert "path".basename("") == "path"
|
||||||
|
# assert "/path".basename("") == "path"
|
||||||
|
# assert "/".basename("") == "/"
|
||||||
|
# assert "".basename("") == ""
|
||||||
|
fun basename(ext: String): String
|
||||||
|
do
|
||||||
|
var l = length - 1 # Index of the last char
|
||||||
|
while l > 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/`
|
||||||
|
if l == 0 then return "/"
|
||||||
|
var pos = chars.last_index_of_from('/', l)
|
||||||
|
var n = self
|
||||||
|
if pos >= 0 then
|
||||||
|
n = substring(pos+1, l-pos)
|
||||||
|
end
|
||||||
|
return n.strip_extension(ext)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Extract the dirname of a path
|
||||||
|
#
|
||||||
|
# assert "/path/to/a_file.ext".dirname == "/path/to"
|
||||||
|
# assert "path/to/a_file.ext".dirname == "path/to"
|
||||||
|
# assert "path/to".dirname == "path"
|
||||||
|
# assert "path/to/".dirname == "path"
|
||||||
|
# assert "path".dirname == "."
|
||||||
|
# assert "/path".dirname == "/"
|
||||||
|
# assert "/".dirname == "/"
|
||||||
|
# assert "".dirname == "."
|
||||||
|
fun dirname: String
|
||||||
|
do
|
||||||
|
var l = length - 1 # Index of the last char
|
||||||
|
while l > 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/`
|
||||||
|
var pos = chars.last_index_of_from('/', l)
|
||||||
|
if pos > 0 then
|
||||||
|
return substring(0, pos)
|
||||||
|
else if pos == 0 then
|
||||||
|
return "/"
|
||||||
|
else
|
||||||
|
return "."
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the canonicalized absolute pathname (see POSIX function `realpath`)
|
||||||
|
fun realpath: String do
|
||||||
|
var cs = to_cstring.file_realpath
|
||||||
|
var res = cs.to_s_with_copy
|
||||||
|
# cs.free_malloc # FIXME memory leak
|
||||||
|
return res
|
||||||
|
end
|
||||||
|
|
||||||
|
# Simplify a file path by remove useless ".", removing "//", and resolving ".."
|
||||||
|
# ".." are not resolved if they start the path
|
||||||
|
# starting "/" is not removed
|
||||||
|
# trainling "/" is removed
|
||||||
|
#
|
||||||
|
# Note that the method only wonrk on the string:
|
||||||
|
# * no I/O access is performed
|
||||||
|
# * the validity of the path is not checked
|
||||||
|
#
|
||||||
|
# assert "some/./complex/../../path/from/../to/a////file//".simplify_path == "path/to/a/file"
|
||||||
|
# assert "../dir/file".simplify_path == "../dir/file"
|
||||||
|
# assert "dir/../../".simplify_path == ".."
|
||||||
|
# assert "dir/..".simplify_path == "."
|
||||||
|
# assert "//absolute//path/".simplify_path == "/absolute/path"
|
||||||
|
# assert "//absolute//../".simplify_path == "/"
|
||||||
|
fun simplify_path: String
|
||||||
|
do
|
||||||
|
var a = self.split_with("/")
|
||||||
|
var a2 = new Array[String]
|
||||||
|
for x in a do
|
||||||
|
if x == "." then continue
|
||||||
|
if x == "" and not a2.is_empty then continue
|
||||||
|
if x == ".." and not a2.is_empty and a2.last != ".." then
|
||||||
|
a2.pop
|
||||||
|
continue
|
||||||
|
end
|
||||||
|
a2.push(x)
|
||||||
|
end
|
||||||
|
if a2.is_empty then return "."
|
||||||
|
if a2.length == 1 and a2.first == "" then return "/"
|
||||||
|
return a2.join("/")
|
||||||
|
end
|
||||||
|
|
||||||
|
# Correctly join two path using the directory separator.
|
||||||
|
#
|
||||||
|
# Using a standard "{self}/{path}" does not work in the following cases:
|
||||||
|
#
|
||||||
|
# * `self` is empty.
|
||||||
|
# * `path` ends with `'/'`.
|
||||||
|
# * `path` starts with `'/'`.
|
||||||
|
#
|
||||||
|
# This method ensures that the join is valid.
|
||||||
|
#
|
||||||
|
# assert "hello".join_path("world") == "hello/world"
|
||||||
|
# assert "hel/lo".join_path("wor/ld") == "hel/lo/wor/ld"
|
||||||
|
# assert "".join_path("world") == "world"
|
||||||
|
# assert "hello".join_path("/world") == "/world"
|
||||||
|
# assert "hello/".join_path("world") == "hello/world"
|
||||||
|
# assert "hello/".join_path("/world") == "/world"
|
||||||
|
#
|
||||||
|
# Note: You may want to use `simplify_path` on the result.
|
||||||
|
#
|
||||||
|
# Note: This method works only with POSIX paths.
|
||||||
|
fun join_path(path: String): String
|
||||||
|
do
|
||||||
|
if path.is_empty then return self
|
||||||
|
if self.is_empty then return path
|
||||||
|
if path.chars[0] == '/' then return path
|
||||||
|
if self.last == '/' then return "{self}{path}"
|
||||||
|
return "{self}/{path}"
|
||||||
|
end
|
||||||
|
|
||||||
|
# Convert the path (`self`) to a program name.
|
||||||
|
#
|
||||||
|
# Ensure the path (`self`) will be treated as-is by POSIX shells when it is
|
||||||
|
# used as a program name. In order to do that, prepend `./` if needed.
|
||||||
|
#
|
||||||
|
# assert "foo".to_program_name == "./foo"
|
||||||
|
# assert "/foo".to_program_name == "/foo"
|
||||||
|
# assert "".to_program_name == "./" # At least, your shell will detect the error.
|
||||||
|
fun to_program_name: String do
|
||||||
|
if self.has_prefix("/") then
|
||||||
|
return self
|
||||||
|
else
|
||||||
|
return "./{self}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Alias for `join_path`
|
||||||
|
#
|
||||||
|
# assert "hello" / "world" == "hello/world"
|
||||||
|
# assert "hel/lo" / "wor/ld" == "hel/lo/wor/ld"
|
||||||
|
# assert "" / "world" == "world"
|
||||||
|
# assert "/hello" / "/world" == "/world"
|
||||||
|
#
|
||||||
|
# This operator is quite useful for chaining changes of path.
|
||||||
|
# The next one being relative to the previous one.
|
||||||
|
#
|
||||||
|
# var a = "foo"
|
||||||
|
# var b = "/bar"
|
||||||
|
# var c = "baz/foobar"
|
||||||
|
# assert a/b/c == "/bar/baz/foobar"
|
||||||
|
fun /(path: String): String do return join_path(path)
|
||||||
|
|
||||||
|
# Returns the relative path needed to go from `self` to `dest`.
|
||||||
|
#
|
||||||
|
# assert "/foo/bar".relpath("/foo/baz") == "../baz"
|
||||||
|
# assert "/foo/bar".relpath("/baz/bar") == "../../baz/bar"
|
||||||
|
#
|
||||||
|
# If `self` or `dest` is relative, they are considered relatively to `getcwd`.
|
||||||
|
#
|
||||||
|
# In some cases, the result is still independent of the current directory:
|
||||||
|
#
|
||||||
|
# assert "foo/bar".relpath("..") == "../../.."
|
||||||
|
#
|
||||||
|
# In other cases, parts of the current directory may be exhibited:
|
||||||
|
#
|
||||||
|
# var p = "../foo/bar".relpath("baz")
|
||||||
|
# var c = getcwd.basename("")
|
||||||
|
# assert p == "../../{c}/baz"
|
||||||
|
#
|
||||||
|
# For path resolution independent of the current directory (eg. for paths in URL),
|
||||||
|
# or to use an other starting directory than the current directory,
|
||||||
|
# just force absolute paths:
|
||||||
|
#
|
||||||
|
# var start = "/a/b/c/d"
|
||||||
|
# var p2 = (start/"../foo/bar").relpath(start/"baz")
|
||||||
|
# assert p2 == "../../d/baz"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# Neither `self` or `dest` has to be real paths or to exist in directories since
|
||||||
|
# the resolution is only done with string manipulations and without any access to
|
||||||
|
# the underlying file system.
|
||||||
|
#
|
||||||
|
# If `self` and `dest` are the same directory, the empty string is returned:
|
||||||
|
#
|
||||||
|
# assert "foo".relpath("foo") == ""
|
||||||
|
# assert "foo/../bar".relpath("bar") == ""
|
||||||
|
#
|
||||||
|
# The empty string and "." designate both the current directory:
|
||||||
|
#
|
||||||
|
# assert "".relpath("foo/bar") == "foo/bar"
|
||||||
|
# assert ".".relpath("foo/bar") == "foo/bar"
|
||||||
|
# assert "foo/bar".relpath("") == "../.."
|
||||||
|
# assert "/" + "/".relpath(".") == getcwd
|
||||||
|
fun relpath(dest: String): String
|
||||||
|
do
|
||||||
|
var cwd = getcwd
|
||||||
|
var from = (cwd/self).simplify_path.split("/")
|
||||||
|
if from.last.is_empty then from.pop # case for the root directory
|
||||||
|
var to = (cwd/dest).simplify_path.split("/")
|
||||||
|
if to.last.is_empty then to.pop # case for the root directory
|
||||||
|
|
||||||
|
# Remove common prefixes
|
||||||
|
while not from.is_empty and not to.is_empty and from.first == to.first do
|
||||||
|
from.shift
|
||||||
|
to.shift
|
||||||
|
end
|
||||||
|
|
||||||
|
# Result is going up in `from` with ".." then going down following `to`
|
||||||
|
var from_len = from.length
|
||||||
|
if from_len == 0 then return to.join("/")
|
||||||
|
var up = "../"*(from_len-1) + ".."
|
||||||
|
if to.is_empty then return up
|
||||||
|
var res = up + "/" + to.join("/")
|
||||||
|
return res
|
||||||
|
end
|
||||||
|
|
||||||
|
# Create a directory (and all intermediate directories if needed)
|
||||||
|
fun mkdir
|
||||||
|
do
|
||||||
|
var dirs = self.split_with("/")
|
||||||
|
var path = new FlatBuffer
|
||||||
|
if dirs.is_empty then return
|
||||||
|
if dirs[0].is_empty then
|
||||||
|
# it was a starting /
|
||||||
|
path.add('/')
|
||||||
|
end
|
||||||
|
for d in dirs do
|
||||||
|
if d.is_empty then continue
|
||||||
|
path.append(d)
|
||||||
|
path.add('/')
|
||||||
|
path.to_s.to_cstring.file_mkdir
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Delete a directory and all of its content, return `true` on success
|
||||||
|
#
|
||||||
|
# Does not go through symbolic links and may get stuck in a cycle if there
|
||||||
|
# is a cycle in the filesystem.
|
||||||
|
fun rmdir: Bool
|
||||||
|
do
|
||||||
|
var ok = true
|
||||||
|
for file in self.files do
|
||||||
|
var file_path = self.join_path(file)
|
||||||
|
var stat = file_path.file_lstat
|
||||||
|
if stat.is_dir then
|
||||||
|
ok = file_path.rmdir and ok
|
||||||
|
else
|
||||||
|
ok = file_path.file_delete and ok
|
||||||
|
end
|
||||||
|
stat.free
|
||||||
|
end
|
||||||
|
|
||||||
|
# Delete the directory itself
|
||||||
|
if ok then to_cstring.rmdir
|
||||||
|
|
||||||
|
return ok
|
||||||
|
end
|
||||||
|
|
||||||
|
# Change the current working directory
|
||||||
|
#
|
||||||
|
# "/etc".chdir
|
||||||
|
# assert getcwd == "/etc"
|
||||||
|
# "..".chdir
|
||||||
|
# assert getcwd == "/"
|
||||||
|
#
|
||||||
|
# TODO: errno
|
||||||
|
fun chdir do to_cstring.file_chdir
|
||||||
|
|
||||||
|
# Return right-most extension (without the dot)
|
||||||
|
#
|
||||||
|
# Only the last extension is returned.
|
||||||
|
# There is no special case for combined extensions.
|
||||||
|
#
|
||||||
|
# assert "file.txt".file_extension == "txt"
|
||||||
|
# assert "file.tar.gz".file_extension == "gz"
|
||||||
|
#
|
||||||
|
# For file without extension, `null` is returned.
|
||||||
|
# Hoever, for trailing dot, `""` is returned.
|
||||||
|
#
|
||||||
|
# assert "file".file_extension == null
|
||||||
|
# assert "file.".file_extension == ""
|
||||||
|
#
|
||||||
|
# The starting dot of hidden files is never considered.
|
||||||
|
#
|
||||||
|
# assert ".file.txt".file_extension == "txt"
|
||||||
|
# assert ".file".file_extension == null
|
||||||
|
fun file_extension: nullable String
|
||||||
|
do
|
||||||
|
var last_slash = chars.last_index_of('.')
|
||||||
|
if last_slash > 0 then
|
||||||
|
return substring( last_slash+1, length )
|
||||||
|
else
|
||||||
|
return null
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# returns files contained within the directory represented by self
|
||||||
|
fun files : Set[ String ] is extern import HashSet[String], HashSet[String].add, NativeString.to_s, String.to_cstring, HashSet[String].as(Set[String]) `{
|
||||||
|
char *dir_path;
|
||||||
|
DIR *dir;
|
||||||
|
|
||||||
|
dir_path = String_to_cstring( recv );
|
||||||
|
if ((dir = opendir(dir_path)) == NULL)
|
||||||
|
{
|
||||||
|
perror( dir_path );
|
||||||
|
exit( 1 );
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
HashSet_of_String results;
|
||||||
|
String file_name;
|
||||||
|
struct dirent *de;
|
||||||
|
|
||||||
|
results = new_HashSet_of_String();
|
||||||
|
|
||||||
|
while ( ( de = readdir( dir ) ) != NULL )
|
||||||
|
if ( strcmp( de->d_name, ".." ) != 0 &&
|
||||||
|
strcmp( de->d_name, "." ) != 0 )
|
||||||
|
{
|
||||||
|
file_name = NativeString_to_s( strdup( de->d_name ) );
|
||||||
|
HashSet_of_String_add( results, file_name );
|
||||||
|
}
|
||||||
|
|
||||||
|
closedir( dir );
|
||||||
|
return HashSet_of_String_as_Set_of_String( results );
|
||||||
|
}
|
||||||
|
`}
|
||||||
|
end
|
||||||
|
|
||||||
|
redef class NativeString
|
||||||
|
private fun file_exists: Bool is extern "string_NativeString_NativeString_file_exists_0"
|
||||||
|
private fun file_stat: FileStat is extern "string_NativeString_NativeString_file_stat_0"
|
||||||
|
private fun file_lstat: FileStat `{
|
||||||
|
struct stat* stat_element;
|
||||||
|
int res;
|
||||||
|
stat_element = malloc(sizeof(struct stat));
|
||||||
|
res = lstat(recv, stat_element);
|
||||||
|
if (res == -1) return NULL;
|
||||||
|
return stat_element;
|
||||||
|
`}
|
||||||
|
private fun file_mkdir: Bool is extern "string_NativeString_NativeString_file_mkdir_0"
|
||||||
|
private fun rmdir: Bool `{ return rmdir(recv); `}
|
||||||
|
private fun file_delete: Bool is extern "string_NativeString_NativeString_file_delete_0"
|
||||||
|
private fun file_chdir is extern "string_NativeString_NativeString_file_chdir_0"
|
||||||
|
private fun file_realpath: NativeString is extern "file_NativeString_realpath"
|
||||||
|
end
|
||||||
|
|
||||||
|
# This class is system dependent ... must reify the vfs
|
||||||
|
extern class FileStat `{ struct stat * `}
|
||||||
|
# Returns the permission bits of file
|
||||||
|
fun mode: Int is extern "file_FileStat_FileStat_mode_0"
|
||||||
|
# Returns the last access time
|
||||||
|
fun atime: Int is extern "file_FileStat_FileStat_atime_0"
|
||||||
|
# Returns the last status change time
|
||||||
|
fun ctime: Int is extern "file_FileStat_FileStat_ctime_0"
|
||||||
|
# Returns the last modification time
|
||||||
|
fun mtime: Int is extern "file_FileStat_FileStat_mtime_0"
|
||||||
|
# Returns the size
|
||||||
|
fun size: Int is extern "file_FileStat_FileStat_size_0"
|
||||||
|
|
||||||
|
# Returns true if it is a regular file (not a device file, pipe, sockect, ...)
|
||||||
|
fun is_reg: Bool `{ return S_ISREG(recv->st_mode); `}
|
||||||
|
# Returns true if it is a directory
|
||||||
|
fun is_dir: Bool `{ return S_ISDIR(recv->st_mode); `}
|
||||||
|
# Returns true if it is a character device
|
||||||
|
fun is_chr: Bool `{ return S_ISCHR(recv->st_mode); `}
|
||||||
|
# Returns true if it is a block device
|
||||||
|
fun is_blk: Bool `{ return S_ISBLK(recv->st_mode); `}
|
||||||
|
# Returns true if the type is fifo
|
||||||
|
fun is_fifo: Bool `{ return S_ISFIFO(recv->st_mode); `}
|
||||||
|
# Returns true if the type is a link
|
||||||
|
fun is_lnk: Bool `{ return S_ISLNK(recv->st_mode); `}
|
||||||
|
# Returns true if the type is a socket
|
||||||
|
fun is_sock: Bool `{ return S_ISSOCK(recv->st_mode); `}
|
||||||
|
end
|
||||||
|
|
||||||
|
# Instance of this class are standard FILE * pointers
|
||||||
|
private extern class NativeFile `{ FILE* `}
|
||||||
|
fun io_read(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_read_2"
|
||||||
|
fun io_write(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_write_2"
|
||||||
|
fun io_close: Int is extern "file_NativeFile_NativeFile_io_close_0"
|
||||||
|
fun file_stat: FileStat is extern "file_NativeFile_NativeFile_file_stat_0"
|
||||||
|
fun fileno: Int `{ return fileno(recv); `}
|
||||||
|
|
||||||
|
new io_open_read(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_read_1"
|
||||||
|
new io_open_write(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_write_1"
|
||||||
|
new native_stdin is extern "file_NativeFileCapable_NativeFileCapable_native_stdin_0"
|
||||||
|
new native_stdout is extern "file_NativeFileCapable_NativeFileCapable_native_stdout_0"
|
||||||
|
new native_stderr is extern "file_NativeFileCapable_NativeFileCapable_native_stderr_0"
|
||||||
|
end
|
||||||
|
|
||||||
|
redef class Sys
|
||||||
|
|
||||||
|
# Standard input
|
||||||
|
var stdin: PollableIStream = new Stdin is protected writable
|
||||||
|
|
||||||
|
# Standard output
|
||||||
|
var stdout: OStream = new Stdout is protected writable
|
||||||
|
|
||||||
|
# Standard output for errors
|
||||||
|
var stderr: OStream = new Stderr is protected writable
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
# Print `objects` on the standard output (`stdout`).
|
||||||
|
protected fun printn(objects: Object...)
|
||||||
|
do
|
||||||
|
sys.stdout.write(objects.to_s)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Print an `object` on the standard output (`stdout`) and add a newline.
|
||||||
|
protected fun print(object: Object)
|
||||||
|
do
|
||||||
|
sys.stdout.write(object.to_s)
|
||||||
|
sys.stdout.write("\n")
|
||||||
|
end
|
||||||
|
|
||||||
|
# Read a character from the standard input (`stdin`).
|
||||||
|
protected fun getc: Char
|
||||||
|
do
|
||||||
|
return sys.stdin.read_char.ascii
|
||||||
|
end
|
||||||
|
|
||||||
|
# Read a line from the standard input (`stdin`).
|
||||||
|
protected fun gets: String
|
||||||
|
do
|
||||||
|
return sys.stdin.read_line
|
||||||
|
end
|
||||||
|
|
||||||
|
# Return the working (current) directory
|
||||||
|
protected fun getcwd: String do return file_getcwd.to_s
|
||||||
|
private fun file_getcwd: NativeString is extern "string_NativeString_NativeString_file_getcwd_0"
|
||||||

samples/Nit/meetup.nit: 376 lines added (new file)
@@ -0,0 +1,376 @@
|
|||||||
|
# This file is part of NIT ( http://www.nitlanguage.org ).
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License
|
||||||
|
|
||||||
|
# Shows a meetup and allows to modify its participants
|
||||||
|
module meetup
|
||||||
|
|
||||||
|
import opportunity_model
|
||||||
|
import boilerplate
|
||||||
|
import welcome
|
||||||
|
import template
|
||||||
|
|
||||||
|
# Shows a meetup and allows to modify its participants
|
||||||
|
class OpportunityMeetupPage
|
||||||
|
super OpportunityPage
|
||||||
|
|
||||||
|
# Meetup the page is supposed to show
|
||||||
|
var meetup: nullable Meetup = null
|
||||||
|
# Answer mode for the meetup
|
||||||
|
var mode = 0
|
||||||
|
|
||||||
|
init from_id(id: String) do
|
||||||
|
var db = new OpportunityDB.open("opportunity")
|
||||||
|
meetup = db.find_meetup_by_id(id)
|
||||||
|
db.close
|
||||||
|
if meetup != null then mode = meetup.answer_mode
|
||||||
|
init
|
||||||
|
end
|
||||||
|
|
||||||
|
init do
|
||||||
|
header.page_js = "mode = {mode};\n"
|
||||||
|
header.page_js += """
|
||||||
|
function update_scores(){
|
||||||
|
var anss = $('.answer');
|
||||||
|
var count = {};
|
||||||
|
var scores = {};
|
||||||
|
var answers = [];
|
||||||
|
var maxscore = 0;
|
||||||
|
for(i=0; i < anss.length; i++){
|
||||||
|
var incscore = 0;
|
||||||
|
var inccount = 0;
|
||||||
|
var idparts = anss[i].id.split("_");
|
||||||
|
var ansid = idparts[1];
|
||||||
|
var html = anss[i].innerHTML;
|
||||||
|
if(html === "<center>✔</center>"){
|
||||||
|
inccount = 1;
|
||||||
|
incscore = 2;
|
||||||
|
}else if(html === "<center>❓</center>"){
|
||||||
|
incscore = 1;
|
||||||
|
}
|
||||||
|
var intansid = parseInt(ansid)
|
||||||
|
if(answers.indexOf(intansid) == -1){
|
||||||
|
answers.push(intansid);
|
||||||
|
}
|
||||||
|
if(ansid in count){
|
||||||
|
count[ansid] += inccount;
|
||||||
|
}else{
|
||||||
|
count[ansid] = inccount;
|
||||||
|
}
|
||||||
|
if(ansid in scores){
|
||||||
|
scores[ansid] += incscore;
|
||||||
|
}else{
|
||||||
|
scores[ansid] = incscore;
|
||||||
|
}
|
||||||
|
if(scores[ansid] > maxscore){
|
||||||
|
maxscore = scores[ansid];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for(i=0; i < answers.length; i++){
|
||||||
|
var ansid = answers[i].toString();
|
||||||
|
var el = $('#total'+ansid)[0];
|
||||||
|
var ins = "<center>"+count[ansid];
|
||||||
|
if(scores[ansid] >= maxscore){
|
||||||
|
ins += "<br/><span style=\\"color:blue\\">★</span>";
|
||||||
|
}
|
||||||
|
ins += "</center>";
|
||||||
|
el.innerHTML = ins;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function change_answer(ele, id){
|
||||||
|
// modify only the currently selected entry
|
||||||
|
if (in_modification_id != id) return;
|
||||||
|
|
||||||
|
var e = document.getElementById(ele.id);
|
||||||
|
var i = e.innerHTML;
|
||||||
|
var ans = true;"""
|
||||||
|
if mode == 0 then
|
||||||
|
header.page_js += """
|
||||||
|
if(i === "<center>✔</center>"){
|
||||||
|
ans = 0;
|
||||||
|
e.innerHTML = "<center>✘</center>"
|
||||||
|
e.style.color = "red";
|
||||||
|
}else{
|
||||||
|
ans = 1;
|
||||||
|
e.innerHTML = "<center>✔</center>";
|
||||||
|
e.style.color = "green";
|
||||||
|
}"""
|
||||||
|
|
||||||
|
else
|
||||||
|
header.page_js += """
|
||||||
|
if(i === "<center>✔</center>"){
|
||||||
|
ans = 1;
|
||||||
|
e.innerHTML = "<center>❓</center>"
|
||||||
|
e.style.color = "#B8860B";
|
||||||
|
}else if(i === "<center>❓</center>"){
|
||||||
|
ans = 0;
|
||||||
|
e.innerHTML = "<center>✘</center>"
|
||||||
|
e.style.color = "red";
|
||||||
|
}else{
|
||||||
|
ans = 2;
|
||||||
|
e.innerHTML = "<center>✔</center>";
|
||||||
|
e.style.color = "green";
|
||||||
|
}"""
|
||||||
|
end
|
||||||
|
header.page_js += """
|
||||||
|
var a = ele.id.split('_')
|
||||||
|
var pid = a[1]
|
||||||
|
var aid = a[2]
|
||||||
|
update_scores();
|
||||||
|
$.ajax({
|
||||||
|
type: "POST",
|
||||||
|
url: "./rest/answer",
|
||||||
|
data: {
|
||||||
|
answer_id: aid,
|
||||||
|
pers_id: pid,
|
||||||
|
answer: ans
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function change_temp_answer(ele){
|
||||||
|
var e = document.getElementById(ele.id);
|
||||||
|
var i = e.innerHTML;"""
|
||||||
|
if mode == 0 then
|
||||||
|
header.page_js += """
|
||||||
|
if(i === "<center>✔</center>"){
|
||||||
|
e.innerHTML = "<center>✘</center>"
|
||||||
|
e.style.color = "red";
|
||||||
|
}else{
|
||||||
|
e.innerHTML = "<center>✔</center>";
|
||||||
|
e.style.color = "green";
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
else
|
||||||
|
header.page_js += """
|
||||||
|
if(i === "<center>✔</center>"){
|
||||||
|
e.innerHTML = "<center>❓</center>";
|
||||||
|
e.style.color = "#B8860B";
|
||||||
|
}else if(i === "<center>❓</center>"){
|
||||||
|
e.innerHTML = "<center>✘</center>"
|
||||||
|
e.style.color = "red";
|
||||||
|
}else{
|
||||||
|
e.innerHTML = "<center>✔</center>";
|
||||||
|
e.style.color = "green";
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
end
|
||||||
|
header.page_js += """
|
||||||
|
update_scores();
|
||||||
|
}
|
||||||
|
function add_part(ele){
|
||||||
|
var e = document.getElementById(ele.id);
|
||||||
|
var pname = document.getElementById("new_name").value;
|
||||||
|
var arr = e.id.split("_");
|
||||||
|
var mid = arr[1];
|
||||||
|
var ans = $('#' + ele.id).parent().parent().parent().children(".answer");
|
||||||
|
ansmap = {};
|
||||||
|
for(i=0;i<ans.length;i++){
|
||||||
|
var curr = ans.eq(i)
|
||||||
|
"""
|
||||||
|
if mode == 0 then
|
||||||
|
header.page_js += """
|
||||||
|
if(curr[0].innerHTML === "<center>✔</center>"){
|
||||||
|
ansmap[curr.attr('id')] = 1
|
||||||
|
}else{
|
||||||
|
ansmap[curr.attr('id')] = 0
|
||||||
|
}"""
|
||||||
|
else
|
||||||
|
header.page_js += """
|
||||||
|
if(curr[0].innerHTML === "<center>✔</center>"){
|
||||||
|
ansmap[curr.attr('id')] = 2
|
||||||
|
}else if(curr[0].innerHTML === "<center>❓</center>"){
|
||||||
|
ansmap[curr.attr('id')] = 1
|
||||||
|
}else{
|
||||||
|
ansmap[curr.attr('id')] = 0
|
||||||
|
}"""
|
||||||
|
end
|
||||||
|
header.page_js += """
|
||||||
|
}
|
||||||
|
$.ajax({
|
||||||
|
type: "POST",
|
||||||
|
url: "./rest/meetup/new_pers",
|
||||||
|
data: {
|
||||||
|
meetup_id: mid,
|
||||||
|
persname: pname,
|
||||||
|
answers: $.param(ansmap)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.done(function(data){
|
||||||
|
location.reload();
|
||||||
|
})
|
||||||
|
.fail(function(data){
|
||||||
|
//TODO: Notify of failure
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function remove_people(ele){
|
||||||
|
var arr = ele.id.split("_")
|
||||||
|
var pid = arr[1]
|
||||||
|
$('#' + ele.id).parent().parent().parent().remove();
|
||||||
|
update_scores();
|
||||||
|
$.ajax({
|
||||||
|
type: "POST",
|
||||||
|
url: "./rest/people",
|
||||||
|
data: {
|
||||||
|
method: "DELETE",
|
||||||
|
p_id: pid
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// ID of line currently open for modification
|
||||||
|
var in_modification_id = null;
|
||||||
|
function modify_people(ele, id){
|
||||||
|
if (in_modification_id != null) {
|
||||||
|
// reset to normal values
|
||||||
|
$('#modify_'+in_modification_id).text("Modify or delete");
|
||||||
|
$('#modify_'+in_modification_id).attr("class", "btn btn-xs btn-warning");
|
||||||
|
$('#line_'+in_modification_id).css("background-color", "");
|
||||||
|
$('#delete_'+in_modification_id).css("display", "none");
|
||||||
|
}
|
||||||
|
if (in_modification_id != id) {
|
||||||
|
// activate modifiable mode
|
||||||
|
$('#modify_'+id).text("Done");
|
||||||
|
$('#modify_'+id).attr("class", "btn btn-xs btn-success");
|
||||||
|
$('#line_'+id).css("background-color", "LightYellow");
|
||||||
|
$('#delete_'+id).show();
|
||||||
|
|
||||||
|
in_modification_id = id;
|
||||||
|
} else {
|
||||||
|
in_modification_id = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
end
|
||||||
|
|
||||||
|
redef fun rendering do
|
||||||
|
if meetup == null then
|
||||||
|
add((new OpportunityHomePage).write_to_string)
|
||||||
|
return
|
||||||
|
end
|
||||||
|
add header
|
||||||
|
var db = new OpportunityDB.open("opportunity")
|
||||||
|
add meetup.to_html(db)
|
||||||
|
db.close
|
||||||
|
add footer
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
redef class Meetup
|
||||||
|
# Build the HTML for `self`
|
||||||
|
fun to_html(db: OpportunityDB): Streamable do
|
||||||
|
var t = new Template
|
||||||
|
t.add """
|
||||||
|
<div class="container">
|
||||||
|
<div class="page-header">
|
||||||
|
<center><h1>{{{name}}}</h1></center>
|
||||||
|
"""
|
||||||
|
if not date.is_empty then t.add """
|
||||||
|
<center><h4>When: {{{date}}}</h4></center>"""
|
||||||
|
|
||||||
|
if not place.is_empty then t.add """
|
||||||
|
<center><h4>Where: {{{place}}}</h4></center>"""
|
||||||
|
|
||||||
|
t.add """
|
||||||
|
</div>
|
||||||
|
<table class="table">
|
||||||
|
"""
|
||||||
|
t.add "<th>Participant name</th>"
|
||||||
|
for i in answers(db) do
|
||||||
|
t.add "<th class=\"text-center\">"
|
||||||
|
t.add i.to_s
|
||||||
|
t.add "</th>"
|
||||||
|
end
|
||||||
|
t.add "<th></th>"
|
||||||
|
t.add "</tr>"
|
||||||
|
for i in participants(db) do
|
||||||
|
i.load_answers(db, self)
|
||||||
|
t.add "<tr id=\"line_{i.id}\">"
|
||||||
|
t.add "<td>"
|
||||||
|
t.add i.to_s
|
||||||
|
t.add "</td>"
|
||||||
|
for j, k in i.answers do
|
||||||
|
var color
|
||||||
|
if answer_mode == 0 then
|
||||||
|
if k == 1 then
|
||||||
|
color = "green"
|
||||||
|
else
|
||||||
|
color = "red"
|
||||||
|
end
|
||||||
|
else
|
||||||
|
if k == 2 then
|
||||||
|
color = "green"
|
||||||
|
else if k == 1 then
|
||||||
|
color = "#B8860B"
|
||||||
|
else
|
||||||
|
color = "red"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
t.add """<td class="answer" onclick="change_answer(this, {{{i.id}}})" id="answer_{{{j.id}}}_{{{i.id}}}" style="color:{{{color}}}">"""
|
||||||
|
t.add "<center>"
|
||||||
|
if answer_mode == 0 then
|
||||||
|
if k == 1 then
|
||||||
|
t.add "✔"
|
||||||
|
else
|
||||||
|
t.add "✘"
|
||||||
|
end
|
||||||
|
else
|
||||||
|
if k == 2 then
|
||||||
|
t.add "✔"
|
||||||
|
else if k == 1 then
|
||||||
|
t.add "❓"
|
||||||
|
else
|
||||||
|
t.add "✘"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
t.add "</center></td>"
|
||||||
|
end
|
||||||
|
t.add """<td class="opportunity-action"><center><button class="btn btn-xs btn-warning" type="button" onclick="modify_people(this, {{{i.id}}})" id="modify_{{{i.id}}}">Modify or delete</button> """
|
||||||
|
t.add """<button class="btn btn-xs btn-danger" type="button" onclick="remove_people(this)" id="delete_{{{i.id}}}" style="display: none;">Delete</button></center></td>"""
|
||||||
|
t.add "</tr>"
|
||||||
|
end
|
||||||
|
t.add """
|
||||||
|
<tr id="newrow" style="background-color: LightYellow">
|
||||||
|
<td><input id="new_name" type="text" placeholder="Your name" class="input-large"></td>
|
||||||
|
"""
|
||||||
|
for i in answers(db) do
|
||||||
|
t.add "<td class=\"answer\" id=\"newans_{i.id}\" onclick=\"change_temp_answer(this)\" style=\"color:red;\"><center>✘</center></td>"
|
||||||
|
end
|
||||||
|
t.add """
|
||||||
|
<td><center><span id="add_{{{id}}}" onclick="add_part(this)" style="color:green;" class="action"><button class="btn btn-xs btn-success" type="button">Done</button></span></center></td>"""
|
||||||
|
t.add "</tr>"
|
||||||
|
# Compute score for each answer
|
||||||
|
var scores = new HashMap[Int, Int]
|
||||||
|
var maxsc = 0
|
||||||
|
for i in answers(db) do
|
||||||
|
scores[i.id] = i.score(db)
|
||||||
|
if scores[i.id] > maxsc then maxsc = scores[i.id]
|
||||||
|
end
|
||||||
|
t.add """
|
||||||
|
<tr id="total">
|
||||||
|
<th>Total</th>
|
||||||
|
"""
|
||||||
|
for i in answers(db) do
|
||||||
|
t.add """<th id="total{{{i.id}}}"><center>{{{i.count(db)}}}"""
|
||||||
|
if scores.has_key(i.id) and scores[i.id] >= maxsc then
|
||||||
|
t.add """<br/><span style="color:blue">★</span>"""
|
||||||
|
end
|
||||||
|
t.add "</center></th>"
|
||||||
|
end
|
||||||
|
t.add "</th>"
|
||||||
|
t.add """
|
||||||
|
<th></th>
|
||||||
|
</tr>"""
|
||||||
|
t.add "</table>"
|
||||||
|
t.add "</div>"
|
||||||
|
return t
|
||||||
|
end
|
||||||
|
end
|
||||||

samples/SQL/videodb.cql: 85 lines added (new file)
@@ -0,0 +1,85 @@
CREATE KEYSPACE videodb WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };

use videodb;

// Basic entity table
// Object mapping ?
CREATE TABLE users (
    username varchar,
    firstname varchar,
    lastname varchar,
    email varchar,
    password varchar,
    created_date timestamp,
    total_credits int,
    credit_change_date timeuuid,
    PRIMARY KEY (username)
);

// One-to-many entity table
CREATE TABLE videos (
    videoid uuid,
    videoname varchar,
    username varchar,
    description varchar,
    tags list<varchar>,
    upload_date timestamp,
    PRIMARY KEY (videoid)
);

// One-to-many from the user point of view
// Also known as a lookup table
CREATE TABLE username_video_index (
    username varchar,
    videoid uuid,
    upload_date timestamp,
    videoname varchar,
    PRIMARY KEY (username, videoid)
);

// Counter table
CREATE TABLE video_rating (
    videoid uuid,
    rating_counter counter,
    rating_total counter,
    PRIMARY KEY (videoid)
);

// Creating index tables for tag keywords
CREATE TABLE tag_index (
    tag varchar,
    videoid uuid,
    timestamp timestamp,
    PRIMARY KEY (tag, videoid)
);

// Comments as a many-to-many
// Looking from the video side to many users
CREATE TABLE comments_by_video (
    videoid uuid,
    username varchar,
    comment_ts timestamp,
    comment varchar,
    PRIMARY KEY (videoid, comment_ts, username)
) WITH CLUSTERING ORDER BY (comment_ts DESC, username ASC);

// Looking from the user side to many videos
CREATE TABLE comments_by_user (
    username varchar,
    videoid uuid,
    comment_ts timestamp,
    comment varchar,
    PRIMARY KEY (username, comment_ts, videoid)
) WITH CLUSTERING ORDER BY (comment_ts DESC, videoid ASC);


// Time series wide row with reverse comparator
CREATE TABLE video_event (
    videoid uuid,
    username varchar,
    event varchar,
    event_timestamp timeuuid,
    video_timestamp bigint,
    PRIMARY KEY ((videoid, username), event_timestamp, event)
) WITH CLUSTERING ORDER BY (event_timestamp DESC, event ASC);
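The CQL sample above is included for syntax-highlighting tests, but the schema is runnable as-is. A minimal smoke test, assuming a local Cassandra node with cqlsh on the PATH; the UUID is a made-up placeholder, and none of this is part of the commit:

# Load the sample schema, then exercise the counter table and a clustered table.
cqlsh -f samples/SQL/videodb.cql
cqlsh -k videodb -e "UPDATE video_rating SET rating_counter = rating_counter + 1, rating_total = rating_total + 4 WHERE videoid = 99051fe9-6a9c-46c2-b949-38ef78858dd0;"
cqlsh -k videodb -e "SELECT * FROM comments_by_video WHERE videoid = 99051fe9-6a9c-46c2-b949-38ef78858dd0 LIMIT 10;"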
85
samples/SQL/videodb.ddl
Normal file
@@ -0,0 +1,85 @@
CREATE KEYSPACE videodb WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };

use videodb;

// Basic entity table
// Object mapping ?
CREATE TABLE users (
    username varchar,
    firstname varchar,
    lastname varchar,
    email varchar,
    password varchar,
    created_date timestamp,
    total_credits int,
    credit_change_date timeuuid,
    PRIMARY KEY (username)
);

// One-to-many entity table
CREATE TABLE videos (
    videoid uuid,
    videoname varchar,
    username varchar,
    description varchar,
    tags list<varchar>,
    upload_date timestamp,
    PRIMARY KEY (videoid)
);

// One-to-many from the user point of view
// Also known as a lookup table
CREATE TABLE username_video_index (
    username varchar,
    videoid uuid,
    upload_date timestamp,
    videoname varchar,
    PRIMARY KEY (username, videoid)
);

// Counter table
CREATE TABLE video_rating (
    videoid uuid,
    rating_counter counter,
    rating_total counter,
    PRIMARY KEY (videoid)
);

// Creating index tables for tag keywords
CREATE TABLE tag_index (
    tag varchar,
    videoid uuid,
    timestamp timestamp,
    PRIMARY KEY (tag, videoid)
);

// Comments as a many-to-many
// Looking from the video side to many users
CREATE TABLE comments_by_video (
    videoid uuid,
    username varchar,
    comment_ts timestamp,
    comment varchar,
    PRIMARY KEY (videoid, comment_ts, username)
) WITH CLUSTERING ORDER BY (comment_ts DESC, username ASC);

// Looking from the user side to many videos
CREATE TABLE comments_by_user (
    username varchar,
    videoid uuid,
    comment_ts timestamp,
    comment varchar,
    PRIMARY KEY (username, comment_ts, videoid)
) WITH CLUSTERING ORDER BY (comment_ts DESC, videoid ASC);


// Time series wide row with reverse comparator
CREATE TABLE video_event (
    videoid uuid,
    username varchar,
    event varchar,
    event_timestamp timeuuid,
    video_timestamp bigint,
    PRIMARY KEY ((videoid, username), event_timestamp, event)
) WITH CLUSTERING ORDER BY (event_timestamp DESC, event ASC);
@@ -1 +1,2 @@
 the green potato=la pomme de terre verte
+le nouveau type de musique=the new type of music
@@ -2,8 +2,10 @@
 
 require 'json'
 require 'net/http'
+require 'optparse'
 require 'plist'
 require 'set'
+require 'thread'
 require 'tmpdir'
 require 'uri'
 require 'yaml'
@@ -13,6 +15,13 @@ GRAMMARS_PATH = File.join(ROOT, "grammars")
 SOURCES_FILE = File.join(ROOT, "grammars.yml")
 CSONC = File.join(ROOT, "node_modules", ".bin", "csonc")
 
+$options = {
+  :add => false,
+  :install => true,
+  :output => SOURCES_FILE,
+  :remote => true,
+}
+
 class SingleFile
   def initialize(path)
     @path = path
@@ -35,7 +44,7 @@ class DirectoryPackage
       path.split('/')[-2] == 'Syntaxes'
     when '.tmlanguage'
       true
-    when '.cson'
+    when '.cson', '.json'
       path.split('/')[-2] == 'grammars'
     else
       false
@@ -143,22 +152,24 @@ def load_grammar(path)
     cson = `"#{CSONC}" "#{path}"`
     raise "Failed to convert CSON grammar '#{path}': #{$?.to_s}" unless $?.success?
     JSON.parse(cson)
+  when '.json'
+    JSON.parse(File.read(path))
   else
     raise "Invalid document type #{path}"
   end
 end
 
-def install_grammar(tmp_dir, source, all_scopes)
+def load_grammars(tmp_dir, source, all_scopes)
   is_url = source.start_with?("http:", "https:")
-  is_single_file = source.end_with?('.tmLanguage', '.plist')
+  return [] if is_url && !$options[:remote]
 
   p = if !is_url
-    if is_single_file
-      SingleFile.new(source)
-    else
+    if File.directory?(source)
       DirectoryPackage.new(source)
+    else
+      SingleFile.new(source)
     end
-  elsif is_single_file
+  elsif source.end_with?('.tmLanguage', '.plist')
     SingleGrammar.new(source)
   elsif source.start_with?('https://github.com')
     GitHubPackage.new(source)
@@ -172,9 +183,7 @@ def install_grammar(tmp_dir, source, all_scopes)
 
   raise "Unsupported source: #{source}" unless p
 
-  installed = []
-
-  p.fetch(tmp_dir).each do |path|
+  p.fetch(tmp_dir).map do |path|
     grammar = load_grammar(path)
     scope = grammar['scopeName']
 
@@ -184,13 +193,21 @@ def install_grammar(tmp_dir, source, all_scopes)
         " Previous package: #{all_scopes[scope]}"
       next
     end
 
-    File.write(File.join(GRAMMARS_PATH, "#{scope}.json"), JSON.pretty_generate(grammar))
     all_scopes[scope] = p.url
+    grammar
+  end
+end
+
+def install_grammars(grammars, path)
+  installed = []
+
+  grammars.each do |grammar|
+    scope = grammar['scopeName']
+    File.write(File.join(GRAMMARS_PATH, "#{scope}.json"), JSON.pretty_generate(grammar))
     installed << scope
   end
 
-  $stderr.puts("OK #{p.url} (#{installed.join(', ')})")
+  $stderr.puts("OK #{path} (#{installed.join(', ')})")
 end
 
 def run_thread(queue, all_scopes)
@@ -206,7 +223,8 @@ def run_thread(queue, all_scopes)
       dir = "#{tmpdir}/#{index}"
       Dir.mkdir(dir)
 
-      install_grammar(dir, source, all_scopes)
+      grammars = load_grammars(dir, source, all_scopes)
+      install_grammars(grammars, source) if $options[:install]
     end
   end
 end
@@ -217,7 +235,7 @@ def generate_yaml(all_scopes, base)
     out[value] << key
   end
 
-  yaml = yaml.sort.to_h
+  yaml = Hash[yaml.sort]
   yaml.each { |k, v| v.sort! }
   yaml
 end
@@ -232,9 +250,10 @@ def main(sources)
 
   all_scopes = {}
 
-  if ARGV[0] == '--add'
+  if source = $options[:add]
     Dir.mktmpdir do |tmpdir|
-      install_grammar(tmpdir, ARGV[1], all_scopes)
+      grammars = load_grammars(tmpdir, source, all_scopes)
+      install_grammars(grammars, source) if $options[:install]
     end
     generate_yaml(all_scopes, sources)
   else
@@ -252,12 +271,34 @@ def main(sources)
   end
 end
 
+OptionParser.new do |opts|
+  opts.banner = "Usage: #{$0} [options]"
+
+  opts.on("--add GRAMMAR", "Add a new grammar. GRAMMAR may be a file path or URL.") do |a|
+    $options[:add] = a
+  end
+
+  opts.on("--[no-]install", "Install grammars into grammars/ directory.") do |i|
+    $options[:install] = i
+  end
+
+  opts.on("--output FILE", "Write output to FILE. Use - for stdout.") do |o|
+    $options[:output] = o == "-" ? $stdout : o
+  end
+
+  opts.on("--[no-]remote", "Download remote grammars.") do |r|
+    $options[:remote] = r
+  end
+end.parse!
+
 sources = File.open(SOURCES_FILE) do |file|
   YAML.load(file)
 end
 
 yaml = main(sources)
 
-File.write(SOURCES_FILE, YAML.dump(yaml))
-$stderr.puts("Done")
+if $options[:output].is_a?(IO)
+  $options[:output].write(YAML.dump(yaml))
+else
+  File.write($options[:output], YAML.dump(yaml))
+end
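The OptionParser block above defines the script's new command-line surface. Hypothetical invocations are shown below; the URL is a placeholder rather than something this commit adds, while the --output -/--no-install/--no-remote combination is exactly what the new grammar test further down relies on:

# Regenerate the scope listing on stdout without writing grammars/ or hitting the network
script/convert-grammars --output - --no-install --no-remote

# Vendor a new grammar and install its scopes into grammars/
script/convert-grammars --add https://github.com/example/example-grammar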
20
script/travis/before_install
Executable file
@@ -0,0 +1,20 @@
#!/bin/sh

set -ex

# Fetch all commits/refs needed to run our tests.
git fetch origin master:master v2.0.0:v2.0.0 test/attributes:test/attributes test/master:test/master

script/vendor-deb libicu48 libicu-dev
if ruby -e 'exit RUBY_VERSION >= "2.0" && RUBY_VERSION < "2.1"'; then
  # Workaround for https://bugs.ruby-lang.org/issues/8074. We can't use this
  # solution on all versions of Ruby due to
  # https://github.com/bundler/bundler/pull/3338.
  bundle config build.charlock_holmes --with-icu-include=$(pwd)/vendor/debs/include --with-icu-lib=$(pwd)/vendor/debs/lib
else
  bundle config build.charlock_holmes --with-icu-dir=$(pwd)/vendor/debs
fi

git submodule init
git submodule sync --quiet
script/fast-submodule-update
13
script/vendor-deb
Executable file
@@ -0,0 +1,13 @@
#!/bin/sh

set -ex

cd "$(dirname "$0")/.."

mkdir -p vendor/apt vendor/debs

(cd vendor/apt && apt-get --assume-yes download "$@")

for deb in vendor/apt/*.deb; do
  ar p $deb data.tar.gz | tar -vzxC vendor/debs --strip-components=2
done
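script/vendor-deb is generic enough to run by hand on an apt-based machine as well; a rough sketch reusing the package names from before_install above (purely illustrative, not part of the commit):

# Download the ICU packages and unpack their payloads under vendor/debs
script/vendor-deb libicu48 libicu-dev
ls vendor/debs/include vendor/debs/lib   # headers and libraries referenced by the bundle config lines above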
@@ -3,6 +3,14 @@ require_relative "./helper"
 class TestGrammars < Minitest::Test
   ROOT = File.expand_path("../..", __FILE__)
 
+  # These grammars have no license but have been grandfathered in. New grammars
+  # must have a license that allows redistribution.
+  UNLICENSED_GRAMMARS_WHITELIST = %w[
+    vendor/grammars/Sublime-Lasso
+    vendor/grammars/Sublime-REBOL
+    vendor/grammars/x86-assembly-textmate-bundle
+  ].freeze
+
   def setup
     @grammars = YAML.load(File.read(File.join(ROOT, "grammars.yml")))
   end
@@ -14,12 +22,11 @@ class TestGrammars < Minitest::Test
   end
 
   def test_submodules_are_in_sync
-    submodules = `git config --list --file "#{File.join(ROOT, ".gitmodules")}"`.lines.grep(/\.path=/).map { |line| line.chomp.split("=", 2).last }
     # Strip off paths inside the submodule so that just the submodule path remains.
     listed_submodules = @grammars.keys.grep(/vendor\/grammars/).map { |source| source[%r{vendor/grammars/[^/]+}] }
 
-    nonexistent_submodules = listed_submodules - submodules
-    unlisted_submodules = submodules - listed_submodules
+    nonexistent_submodules = listed_submodules - submodule_paths
+    unlisted_submodules = submodule_paths - listed_submodules
 
     message = ""
     unless nonexistent_submodules.empty?
@@ -36,4 +43,94 @@ class TestGrammars < Minitest::Test
 
     assert nonexistent_submodules.empty? && unlisted_submodules.empty?, message
   end
+
+  def test_local_scopes_are_in_sync
+    actual = YAML.load(`"#{File.join(ROOT, "script", "convert-grammars")}" --output - --no-install --no-remote`)
+    assert $?.success?, "script/convert-grammars failed"
+
+    # We're not checking remote grammars. That can take a long time and make CI
+    # flaky if network conditions are poor.
+    @grammars.delete_if { |k, v| k.start_with?("http:", "https:") }
+
+    @grammars.each do |k, v|
+      assert_equal v, actual[k], "The scopes listed for #{k} in grammars.yml don't match the scopes found in that repository"
+    end
+  end
+
+  def test_submodules_have_licenses
+    categories = submodule_paths.group_by do |submodule|
+      files = Dir[File.join(ROOT, submodule, "*")]
+      license = files.find { |path| File.basename(path) =~ /\blicen[cs]e\b/i } || files.find { |path| File.basename(path) =~ /\bcopying\b/i }
+      if license.nil?
+        if readme = files.find { |path| File.basename(path) =~ /\Areadme\b/i }
+          license = readme if File.read(readme) =~ /\blicen[cs]e\b/i
+        end
+      end
+      if license.nil?
+        :unlicensed
+      elsif classify_license(license)
+        :licensed
+      else
+        :unrecognized
+      end
+    end
+
+    unlicensed = categories[:unlicensed] || []
+    unrecognized = categories[:unrecognized] || []
+    disallowed_unlicensed = unlicensed - UNLICENSED_GRAMMARS_WHITELIST
+    disallowed_unrecognized = unrecognized - UNLICENSED_GRAMMARS_WHITELIST
+    extra_whitelist_entries = UNLICENSED_GRAMMARS_WHITELIST - (unlicensed | unrecognized)
+
+    message = ""
+    if disallowed_unlicensed.any?
+      message << "The following grammar submodules don't seem to have a license. All grammars must have a license that permits redistribution.\n"
+      message << disallowed_unlicensed.sort.join("\n")
+    end
+    if disallowed_unrecognized.any?
+      message << "\n\n" unless message.empty?
+      message << "The following grammar submodules have an unrecognized license. Please update #{__FILE__} to recognize the license.\n"
+      message << disallowed_unrecognized.sort.join("\n")
+    end
+    if extra_whitelist_entries.any?
+      message << "\n\n" unless message.empty?
+      message << "The following grammar submodules are listed in UNLICENSED_GRAMMARS_WHITELIST but either have a license (yay!)\n"
+      message << "or have been removed from the repository. Please remove them from the whitelist.\n"
+      message << extra_whitelist_entries.sort.join("\n")
+    end
+
+    assert disallowed_unlicensed.empty? && disallowed_unrecognized.empty? && extra_whitelist_entries.empty?, message
+  end
+
+  private
+
+  def submodule_paths
+    @submodule_paths ||= `git config --list --file "#{File.join(ROOT, ".gitmodules")}"`.lines.grep(/\.path=/).map { |line| line.chomp.split("=", 2).last }
+  end
+
+  def classify_license(path)
+    content = File.read(path)
+    if content.include?("Apache License") && content.include?("2.0")
+      "Apache 2.0"
+    elsif content.include?("GNU") && content =~ /general/i && content =~ /public/i
+      if content =~ /version 2/i
+        "GPLv2"
+      elsif content =~ /version 3/i
+        "GPLv3"
+      end
+    elsif content.include?("GPL") && content.include?("http://www.gnu.org/licenses/gpl.html")
+      "GPLv3"
+    elsif content.include?("Creative Commons")
+      "CC"
+    elsif content.include?("tidy-license.txt") || content.include?("If not otherwise specified (see below)")
+      "textmate"
+    elsif content =~ /^\s*[*-]\s+Redistribution/ || content.include?("Redistributions of source code")
+      "BSD"
+    elsif content.include?("Permission is hereby granted") || content =~ /\bMIT\b/
+      "MIT"
+    elsif content.include?("unlicense.org")
+      "unlicense"
+    elsif content.include?("http://www.wtfpl.net/txt/copying/")
+      "WTFPL"
+    end
+  end
 end
2
vendor/grammars/Agda.tmbundle
vendored
Submodule vendor/grammars/Agda.tmbundle updated: 784f435f09...68a218c489
2
vendor/grammars/IDL-Syntax
vendored
Submodule vendor/grammars/IDL-Syntax updated: 9473b7faaf...3baeaeafac
2
vendor/grammars/NimLime
vendored
Submodule vendor/grammars/NimLime updated: 7a2fb4e73a...a7067c605b
2
vendor/grammars/SCSS.tmbundle
vendored
Submodule vendor/grammars/SCSS.tmbundle updated: d6188e579f...4147502063
2
vendor/grammars/Scalate.tmbundle
vendored
Submodule vendor/grammars/Scalate.tmbundle updated: 4f85314fca...0307535add
1
vendor/grammars/Sublime-HTTP
vendored
Submodule vendor/grammars/Sublime-HTTP added at 0099998617
1
vendor/grammars/Sublime-Nit
vendored
Submodule vendor/grammars/Sublime-Nit added at 7d8b350392
2
vendor/grammars/Sublime-VimL
vendored
Submodule vendor/grammars/Sublime-VimL updated: 6ab7e19a57...366fdc64e3
1
vendor/grammars/carto-atom
vendored
Submodule vendor/grammars/carto-atom added at 8086625aa5
2
vendor/grammars/factor
vendored
Submodule vendor/grammars/factor updated: 2dc5590966...2453a785f7
2
vendor/grammars/fsharpbinding
vendored
Submodule vendor/grammars/fsharpbinding updated: af755c8b01...99d2e9a539
2
vendor/grammars/haxe-sublime-bundle
vendored
Submodule vendor/grammars/haxe-sublime-bundle updated: 58cad4780c...e2613bb125
2
vendor/grammars/language-clojure
vendored
Submodule vendor/grammars/language-clojure updated: d649d9f5b2...bae6eee855
2
vendor/grammars/language-coffee-script
vendored
Submodule vendor/grammars/language-coffee-script updated: c6e8d33715...d86c8963dc
2
vendor/grammars/language-gfm
vendored
Submodule vendor/grammars/language-gfm updated: c6df027b07...6af44a0871
1
vendor/grammars/language-hy
vendored
Submodule vendor/grammars/language-hy added at f9750744ae
2
vendor/grammars/language-javascript
vendored
Submodule vendor/grammars/language-javascript updated: 15dc5d1d86...515751937d
2
vendor/grammars/language-python
vendored
Submodule vendor/grammars/language-python updated: 476a353595...46072e32e3
1
vendor/grammars/language-sass
vendored
Submodule vendor/grammars/language-sass deleted from 064a8b5a87
2
vendor/grammars/language-shellscript
vendored
Submodule vendor/grammars/language-shellscript updated: e2d62af11a...9839719721
2
vendor/grammars/language-yaml
vendored
Submodule vendor/grammars/language-yaml updated: eddd079347...ce8b441467
2
vendor/grammars/latex.tmbundle
vendored
Submodule vendor/grammars/latex.tmbundle updated: 682c4b725c...52b2251aab
2
vendor/grammars/mercury-tmlanguage
vendored
Submodule vendor/grammars/mercury-tmlanguage updated: b5a4fd6e40...eaef0b0643
1
vendor/grammars/sass-textmate-bundle
vendored
Submodule vendor/grammars/sass-textmate-bundle added at 8444f9796e
2
vendor/grammars/sublime-mask
vendored
Submodule vendor/grammars/sublime-mask updated: 2f59519ffd...6f12d2841d
2
vendor/grammars/swift.tmbundle
vendored
Submodule vendor/grammars/swift.tmbundle updated: 81a0164145...3c7eac5445