mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Compare commits
79 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4d033e7e83 | ||
|
|
efc3638065 | ||
|
|
b7685ab317 | ||
|
|
83c5f6a004 | ||
|
|
aa5a94cc3e | ||
|
|
a5b6331ab5 | ||
|
|
2164b28c64 | ||
|
|
0fb824b345 | ||
|
|
29ee094d66 | ||
|
|
4a7ae50ec8 | ||
|
|
398439a937 | ||
|
|
a3bc3a7615 | ||
|
|
7989fbd613 | ||
|
|
c389c79be9 | ||
|
|
1fd2f921fd | ||
|
|
ed851849db | ||
|
|
cfb9f6f0a4 | ||
|
|
3d5a0da62e | ||
|
|
4e15369f9a | ||
|
|
5b3152d99d | ||
|
|
a6955f4edb | ||
|
|
280ef7d1bd | ||
|
|
8d2ea90a5b | ||
|
|
4bf7abd73d | ||
|
|
8f251e6756 | ||
|
|
4cd35c1f33 | ||
|
|
78fda33707 | ||
|
|
5c6a98f479 | ||
|
|
efbcb942c3 | ||
|
|
f3da1bc3b1 | ||
|
|
72a6186f08 | ||
|
|
8cde6d2e8f | ||
|
|
4f2c7fdc3c | ||
|
|
5a830504a4 | ||
|
|
086fb09038 | ||
|
|
5544a041ce | ||
|
|
6447333368 | ||
|
|
1d6a42f0eb | ||
|
|
de14b75517 | ||
|
|
0f302713da | ||
|
|
a66d064d4a | ||
|
|
4fefe2020f | ||
|
|
72fab07a14 | ||
|
|
adbf4f6b17 | ||
|
|
cfcf4ca915 | ||
|
|
c427fba87f | ||
|
|
ab14bcab03 | ||
|
|
78de3fb959 | ||
|
|
b9eda90ddd | ||
|
|
66b346c8fb | ||
|
|
8215b225d9 | ||
|
|
41da8c6352 | ||
|
|
b7dad4df5e | ||
|
|
1a98ccbf5f | ||
|
|
8d16a3365e | ||
|
|
67bf48fafc | ||
|
|
9a3c9a8c19 | ||
|
|
6a192dae63 | ||
|
|
f5895216a8 | ||
|
|
09a33f8daa | ||
|
|
185db0e8d5 | ||
|
|
85efbde3f7 | ||
|
|
93d7aa3d07 | ||
|
|
e96096f786 | ||
|
|
0a850eeddd | ||
|
|
42658ffd61 | ||
|
|
24fc2842d2 | ||
|
|
9b6a7622d2 | ||
|
|
6c666075b5 | ||
|
|
6d26bf5c82 | ||
|
|
51dde1f6a4 | ||
|
|
13c9259d23 | ||
|
|
a22c2d678b | ||
|
|
5c36f8df85 | ||
|
|
c3642ba7ed | ||
|
|
a148d52aed | ||
|
|
26fbc45baf | ||
|
|
89795ebd1f | ||
|
|
3ecc1f883c |
@@ -143,8 +143,8 @@ If you are the current maintainer of this gem:
|
|||||||
0. Make sure your local dependencies are up to date: `bundle install`
|
0. Make sure your local dependencies are up to date: `bundle install`
|
||||||
0. Ensure that samples are updated: `bundle exec rake samples`
|
0. Ensure that samples are updated: `bundle exec rake samples`
|
||||||
0. Ensure that tests are green: `bundle exec rake test`
|
0. Ensure that tests are green: `bundle exec rake test`
|
||||||
0. Bump gem version in github-linguist.gemspec. For example, [like this](https://github.com/github/linguist/commit/97908204a385940e47251af9ecb689e8f6515c48).
|
0. Bump gem version in `lib/linguist/version.rb`. For example, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
|
||||||
0. Make a PR to github/linguist. For example, [#1075](https://github.com/github/linguist/pull/1075).
|
0. Make a PR to github/linguist. For example, [#1238](https://github.com/github/linguist/pull/1238).
|
||||||
0. Build a local gem: `gem build github-linguist.gemspec`
|
0. Build a local gem: `gem build github-linguist.gemspec`
|
||||||
0. Testing:
|
0. Testing:
|
||||||
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
||||||
|
|||||||
@@ -241,7 +241,31 @@ module Linguist
|
|||||||
def lines
|
def lines
|
||||||
@lines ||=
|
@lines ||=
|
||||||
if viewable? && data
|
if viewable? && data
|
||||||
data.split(/\r\n|\r|\n/, -1)
|
# `data` is usually encoded as ASCII-8BIT even when the content has
|
||||||
|
# been detected as a different encoding. However, we are not allowed
|
||||||
|
# to change the encoding of `data` because we've made the implicit
|
||||||
|
# guarantee that each entry in `lines` is encoded the same way as
|
||||||
|
# `data`.
|
||||||
|
#
|
||||||
|
# Instead, we re-encode each possible newline sequence as the
|
||||||
|
# detected encoding, then force them back to the encoding of `data`
|
||||||
|
# (usually a binary encoding like ASCII-8BIT). This means that the
|
||||||
|
# byte sequence will match how newlines are likely encoded in the
|
||||||
|
# file, but we don't have to change the encoding of `data` as far as
|
||||||
|
# Ruby is concerned. This allows us to correctly parse out each line
|
||||||
|
# without changing the encoding of `data`, and
|
||||||
|
# also--importantly--without having to duplicate many (potentially
|
||||||
|
# large) strings.
|
||||||
|
begin
|
||||||
|
encoded_newlines = ["\r\n", "\r", "\n"].
|
||||||
|
map { |nl| nl.encode(encoding, "ASCII-8BIT").force_encoding(data.encoding) }
|
||||||
|
|
||||||
|
data.split(Regexp.union(encoded_newlines), -1)
|
||||||
|
rescue Encoding::ConverterNotFoundError
|
||||||
|
# The data is not splittable in the detected encoding. Assume it's
|
||||||
|
# one big line.
|
||||||
|
[data]
|
||||||
|
end
|
||||||
else
|
else
|
||||||
[]
|
[]
|
||||||
end
|
end
|
||||||
|
|||||||
@@ -293,6 +293,7 @@ C++:
|
|||||||
- .inl
|
- .inl
|
||||||
- .tcc
|
- .tcc
|
||||||
- .tpp
|
- .tpp
|
||||||
|
- .ipp
|
||||||
|
|
||||||
C-ObjDump:
|
C-ObjDump:
|
||||||
type: data
|
type: data
|
||||||
@@ -705,6 +706,12 @@ Game Maker Language:
|
|||||||
extensions:
|
extensions:
|
||||||
- .gml
|
- .gml
|
||||||
|
|
||||||
|
GAMS:
|
||||||
|
type: programming
|
||||||
|
lexer: Text only
|
||||||
|
extensions:
|
||||||
|
- .gms
|
||||||
|
|
||||||
GAP:
|
GAP:
|
||||||
type: programming
|
type: programming
|
||||||
lexer: Text only
|
lexer: Text only
|
||||||
@@ -819,6 +826,9 @@ Groovy:
|
|||||||
color: "#e69f56"
|
color: "#e69f56"
|
||||||
extensions:
|
extensions:
|
||||||
- .groovy
|
- .groovy
|
||||||
|
- .grt
|
||||||
|
- .gtpl
|
||||||
|
- .gvy
|
||||||
interpreters:
|
interpreters:
|
||||||
- groovy
|
- groovy
|
||||||
|
|
||||||
@@ -907,7 +917,7 @@ Haskell:
|
|||||||
Haxe:
|
Haxe:
|
||||||
type: programming
|
type: programming
|
||||||
ace_mode: haxe
|
ace_mode: haxe
|
||||||
color: "#346d51"
|
color: "#f7941e"
|
||||||
extensions:
|
extensions:
|
||||||
- .hx
|
- .hx
|
||||||
- .hxsl
|
- .hxsl
|
||||||
@@ -1059,6 +1069,7 @@ JavaScript:
|
|||||||
- ._js
|
- ._js
|
||||||
- .bones
|
- .bones
|
||||||
- .es6
|
- .es6
|
||||||
|
- .frag
|
||||||
- .jake
|
- .jake
|
||||||
- .jsfl
|
- .jsfl
|
||||||
- .jsm
|
- .jsm
|
||||||
@@ -1250,6 +1261,7 @@ Markdown:
|
|||||||
- .md
|
- .md
|
||||||
- .markdown
|
- .markdown
|
||||||
- .mkd
|
- .mkd
|
||||||
|
- .mkdn
|
||||||
- .mkdown
|
- .mkdown
|
||||||
- .ron
|
- .ron
|
||||||
|
|
||||||
@@ -1471,6 +1483,14 @@ Org:
|
|||||||
extensions:
|
extensions:
|
||||||
- .org
|
- .org
|
||||||
|
|
||||||
|
Ox:
|
||||||
|
type: programming
|
||||||
|
lexer: Text only
|
||||||
|
extensions:
|
||||||
|
- .ox
|
||||||
|
- .oxh
|
||||||
|
- .oxo
|
||||||
|
|
||||||
Oxygene:
|
Oxygene:
|
||||||
type: programming
|
type: programming
|
||||||
lexer: Text only
|
lexer: Text only
|
||||||
@@ -1500,6 +1520,13 @@ PHP:
|
|||||||
filenames:
|
filenames:
|
||||||
- Phakefile
|
- Phakefile
|
||||||
|
|
||||||
|
Pan:
|
||||||
|
type: programming
|
||||||
|
lexer: Text only
|
||||||
|
color: '#cc0000'
|
||||||
|
extensions:
|
||||||
|
- .pan
|
||||||
|
|
||||||
Parrot:
|
Parrot:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#f3ca0a"
|
color: "#f3ca0a"
|
||||||
@@ -1698,6 +1725,8 @@ R:
|
|||||||
extensions:
|
extensions:
|
||||||
- .r
|
- .r
|
||||||
- .R
|
- .R
|
||||||
|
- .Rd
|
||||||
|
- .rd
|
||||||
- .rsx
|
- .rsx
|
||||||
filenames:
|
filenames:
|
||||||
- .Rprofile
|
- .Rprofile
|
||||||
@@ -1779,7 +1808,7 @@ Red:
|
|||||||
extensions:
|
extensions:
|
||||||
- .red
|
- .red
|
||||||
- .reds
|
- .reds
|
||||||
|
|
||||||
Redcode:
|
Redcode:
|
||||||
extensions:
|
extensions:
|
||||||
- .cw
|
- .cw
|
||||||
@@ -1842,6 +1871,13 @@ Rust:
|
|||||||
extensions:
|
extensions:
|
||||||
- .rs
|
- .rs
|
||||||
|
|
||||||
|
SAS:
|
||||||
|
type: programming
|
||||||
|
color: "#1E90FF"
|
||||||
|
lexer: Text only
|
||||||
|
extensions:
|
||||||
|
- .sas
|
||||||
|
|
||||||
SCSS:
|
SCSS:
|
||||||
type: markup
|
type: markup
|
||||||
group: CSS
|
group: CSS
|
||||||
@@ -1850,7 +1886,7 @@ SCSS:
|
|||||||
- .scss
|
- .scss
|
||||||
|
|
||||||
SQL:
|
SQL:
|
||||||
type: programming
|
type: data
|
||||||
ace_mode: sql
|
ace_mode: sql
|
||||||
extensions:
|
extensions:
|
||||||
- .sql
|
- .sql
|
||||||
@@ -2010,6 +2046,13 @@ SuperCollider:
|
|||||||
extensions:
|
extensions:
|
||||||
- .scd
|
- .scd
|
||||||
|
|
||||||
|
Swift:
|
||||||
|
type: programming
|
||||||
|
color: "#ffac45"
|
||||||
|
lexer: Text only
|
||||||
|
extensions:
|
||||||
|
- .swift
|
||||||
|
|
||||||
SystemVerilog:
|
SystemVerilog:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#343761"
|
color: "#343761"
|
||||||
@@ -2200,6 +2243,7 @@ XML:
|
|||||||
- .clixml
|
- .clixml
|
||||||
- .cproject
|
- .cproject
|
||||||
- .csproj
|
- .csproj
|
||||||
|
- .ct
|
||||||
- .dita
|
- .dita
|
||||||
- .ditamap
|
- .ditamap
|
||||||
- .ditaval
|
- .ditaval
|
||||||
@@ -2305,6 +2349,14 @@ Zephir:
|
|||||||
extensions:
|
extensions:
|
||||||
- .zep
|
- .zep
|
||||||
|
|
||||||
|
Zimpl:
|
||||||
|
type: programming
|
||||||
|
lexer: Text only
|
||||||
|
extensions:
|
||||||
|
- .zimpl
|
||||||
|
- .zmpl
|
||||||
|
- .zpl
|
||||||
|
|
||||||
eC:
|
eC:
|
||||||
type: programming
|
type: programming
|
||||||
search_term: ec
|
search_term: ec
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -148,7 +148,7 @@
|
|||||||
- (^|/)[Mm]icrosoft([Mm]vc)?([Aa]jax|[Vv]alidation)(\.debug)?\.js$
|
- (^|/)[Mm]icrosoft([Mm]vc)?([Aa]jax|[Vv]alidation)(\.debug)?\.js$
|
||||||
|
|
||||||
# NuGet
|
# NuGet
|
||||||
- ^[Pp]ackages/
|
- ^[Pp]ackages\/.+\.\d+\/
|
||||||
|
|
||||||
# ExtJS
|
# ExtJS
|
||||||
- (^|/)extjs/.*?\.js$
|
- (^|/)extjs/.*?\.js$
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
module Linguist
|
module Linguist
|
||||||
VERSION = "2.11.0"
|
VERSION = "2.11.3"
|
||||||
end
|
end
|
||||||
|
|||||||
664
samples/C++/epoll_reactor.ipp
Normal file
664
samples/C++/epoll_reactor.ipp
Normal file
@@ -0,0 +1,664 @@
|
|||||||
|
//
|
||||||
|
// detail/impl/epoll_reactor.ipp
|
||||||
|
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
//
|
||||||
|
// Copyright (c) 2003-2013 Christopher M. Kohlhoff (chris at kohlhoff dot com)
|
||||||
|
//
|
||||||
|
// Distributed under the Boost Software License, Version 1.0. (See accompanying
|
||||||
|
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||||
|
//
|
||||||
|
|
||||||
|
#ifndef BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
|
||||||
|
#define BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
|
||||||
|
|
||||||
|
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
|
||||||
|
# pragma once
|
||||||
|
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
|
||||||
|
|
||||||
|
#include <boost/asio/detail/config.hpp>
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_EPOLL)
|
||||||
|
|
||||||
|
#include <cstddef>
|
||||||
|
#include <sys/epoll.h>
|
||||||
|
#include <boost/asio/detail/epoll_reactor.hpp>
|
||||||
|
#include <boost/asio/detail/throw_error.hpp>
|
||||||
|
#include <boost/asio/error.hpp>
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
# include <sys/timerfd.h>
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
|
||||||
|
#include <boost/asio/detail/push_options.hpp>
|
||||||
|
|
||||||
|
namespace boost {
|
||||||
|
namespace asio {
|
||||||
|
namespace detail {
|
||||||
|
|
||||||
|
epoll_reactor::epoll_reactor(boost::asio::io_service& io_service)
|
||||||
|
: boost::asio::detail::service_base<epoll_reactor>(io_service),
|
||||||
|
io_service_(use_service<io_service_impl>(io_service)),
|
||||||
|
mutex_(),
|
||||||
|
interrupter_(),
|
||||||
|
epoll_fd_(do_epoll_create()),
|
||||||
|
timer_fd_(do_timerfd_create()),
|
||||||
|
shutdown_(false)
|
||||||
|
{
|
||||||
|
// Add the interrupter's descriptor to epoll.
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
|
||||||
|
ev.data.ptr = &interrupter_;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, interrupter_.read_descriptor(), &ev);
|
||||||
|
interrupter_.interrupt();
|
||||||
|
|
||||||
|
// Add the timer descriptor to epoll.
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
{
|
||||||
|
ev.events = EPOLLIN | EPOLLERR;
|
||||||
|
ev.data.ptr = &timer_fd_;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, timer_fd_, &ev);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_reactor::~epoll_reactor()
|
||||||
|
{
|
||||||
|
if (epoll_fd_ != -1)
|
||||||
|
close(epoll_fd_);
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
close(timer_fd_);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::shutdown_service()
|
||||||
|
{
|
||||||
|
mutex::scoped_lock lock(mutex_);
|
||||||
|
shutdown_ = true;
|
||||||
|
lock.unlock();
|
||||||
|
|
||||||
|
op_queue<operation> ops;
|
||||||
|
|
||||||
|
while (descriptor_state* state = registered_descriptors_.first())
|
||||||
|
{
|
||||||
|
for (int i = 0; i < max_ops; ++i)
|
||||||
|
ops.push(state->op_queue_[i]);
|
||||||
|
state->shutdown_ = true;
|
||||||
|
registered_descriptors_.free(state);
|
||||||
|
}
|
||||||
|
|
||||||
|
timer_queues_.get_all_timers(ops);
|
||||||
|
|
||||||
|
io_service_.abandon_operations(ops);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::fork_service(boost::asio::io_service::fork_event fork_ev)
|
||||||
|
{
|
||||||
|
if (fork_ev == boost::asio::io_service::fork_child)
|
||||||
|
{
|
||||||
|
if (epoll_fd_ != -1)
|
||||||
|
::close(epoll_fd_);
|
||||||
|
epoll_fd_ = -1;
|
||||||
|
epoll_fd_ = do_epoll_create();
|
||||||
|
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
::close(timer_fd_);
|
||||||
|
timer_fd_ = -1;
|
||||||
|
timer_fd_ = do_timerfd_create();
|
||||||
|
|
||||||
|
interrupter_.recreate();
|
||||||
|
|
||||||
|
// Add the interrupter's descriptor to epoll.
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
|
||||||
|
ev.data.ptr = &interrupter_;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, interrupter_.read_descriptor(), &ev);
|
||||||
|
interrupter_.interrupt();
|
||||||
|
|
||||||
|
// Add the timer descriptor to epoll.
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
{
|
||||||
|
ev.events = EPOLLIN | EPOLLERR;
|
||||||
|
ev.data.ptr = &timer_fd_;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, timer_fd_, &ev);
|
||||||
|
}
|
||||||
|
|
||||||
|
update_timeout();
|
||||||
|
|
||||||
|
// Re-register all descriptors with epoll.
|
||||||
|
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
|
||||||
|
for (descriptor_state* state = registered_descriptors_.first();
|
||||||
|
state != 0; state = state->next_)
|
||||||
|
{
|
||||||
|
ev.events = state->registered_events_;
|
||||||
|
ev.data.ptr = state;
|
||||||
|
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, state->descriptor_, &ev);
|
||||||
|
if (result != 0)
|
||||||
|
{
|
||||||
|
boost::system::error_code ec(errno,
|
||||||
|
boost::asio::error::get_system_category());
|
||||||
|
boost::asio::detail::throw_error(ec, "epoll re-registration");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::init_task()
|
||||||
|
{
|
||||||
|
io_service_.init_task();
|
||||||
|
}
|
||||||
|
|
||||||
|
int epoll_reactor::register_descriptor(socket_type descriptor,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data)
|
||||||
|
{
|
||||||
|
descriptor_data = allocate_descriptor_state();
|
||||||
|
|
||||||
|
{
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
descriptor_data->reactor_ = this;
|
||||||
|
descriptor_data->descriptor_ = descriptor;
|
||||||
|
descriptor_data->shutdown_ = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLPRI | EPOLLET;
|
||||||
|
descriptor_data->registered_events_ = ev.events;
|
||||||
|
ev.data.ptr = descriptor_data;
|
||||||
|
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, descriptor, &ev);
|
||||||
|
if (result != 0)
|
||||||
|
return errno;
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
int epoll_reactor::register_internal_descriptor(
|
||||||
|
int op_type, socket_type descriptor,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data, reactor_op* op)
|
||||||
|
{
|
||||||
|
descriptor_data = allocate_descriptor_state();
|
||||||
|
|
||||||
|
{
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
descriptor_data->reactor_ = this;
|
||||||
|
descriptor_data->descriptor_ = descriptor;
|
||||||
|
descriptor_data->shutdown_ = false;
|
||||||
|
descriptor_data->op_queue_[op_type].push(op);
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLPRI | EPOLLET;
|
||||||
|
descriptor_data->registered_events_ = ev.events;
|
||||||
|
ev.data.ptr = descriptor_data;
|
||||||
|
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, descriptor, &ev);
|
||||||
|
if (result != 0)
|
||||||
|
return errno;
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::move_descriptor(socket_type,
|
||||||
|
epoll_reactor::per_descriptor_data& target_descriptor_data,
|
||||||
|
epoll_reactor::per_descriptor_data& source_descriptor_data)
|
||||||
|
{
|
||||||
|
target_descriptor_data = source_descriptor_data;
|
||||||
|
source_descriptor_data = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::start_op(int op_type, socket_type descriptor,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data, reactor_op* op,
|
||||||
|
bool is_continuation, bool allow_speculative)
|
||||||
|
{
|
||||||
|
if (!descriptor_data)
|
||||||
|
{
|
||||||
|
op->ec_ = boost::asio::error::bad_descriptor;
|
||||||
|
post_immediate_completion(op, is_continuation);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
if (descriptor_data->shutdown_)
|
||||||
|
{
|
||||||
|
post_immediate_completion(op, is_continuation);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (descriptor_data->op_queue_[op_type].empty())
|
||||||
|
{
|
||||||
|
if (allow_speculative
|
||||||
|
&& (op_type != read_op
|
||||||
|
|| descriptor_data->op_queue_[except_op].empty()))
|
||||||
|
{
|
||||||
|
if (op->perform())
|
||||||
|
{
|
||||||
|
descriptor_lock.unlock();
|
||||||
|
io_service_.post_immediate_completion(op, is_continuation);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (op_type == write_op)
|
||||||
|
{
|
||||||
|
if ((descriptor_data->registered_events_ & EPOLLOUT) == 0)
|
||||||
|
{
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = descriptor_data->registered_events_ | EPOLLOUT;
|
||||||
|
ev.data.ptr = descriptor_data;
|
||||||
|
if (epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, descriptor, &ev) == 0)
|
||||||
|
{
|
||||||
|
descriptor_data->registered_events_ |= ev.events;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
op->ec_ = boost::system::error_code(errno,
|
||||||
|
boost::asio::error::get_system_category());
|
||||||
|
io_service_.post_immediate_completion(op, is_continuation);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (op_type == write_op)
|
||||||
|
{
|
||||||
|
descriptor_data->registered_events_ |= EPOLLOUT;
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = descriptor_data->registered_events_;
|
||||||
|
ev.data.ptr = descriptor_data;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, descriptor, &ev);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
descriptor_data->op_queue_[op_type].push(op);
|
||||||
|
io_service_.work_started();
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::cancel_ops(socket_type,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data)
|
||||||
|
{
|
||||||
|
if (!descriptor_data)
|
||||||
|
return;
|
||||||
|
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
op_queue<operation> ops;
|
||||||
|
for (int i = 0; i < max_ops; ++i)
|
||||||
|
{
|
||||||
|
while (reactor_op* op = descriptor_data->op_queue_[i].front())
|
||||||
|
{
|
||||||
|
op->ec_ = boost::asio::error::operation_aborted;
|
||||||
|
descriptor_data->op_queue_[i].pop();
|
||||||
|
ops.push(op);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
descriptor_lock.unlock();
|
||||||
|
|
||||||
|
io_service_.post_deferred_completions(ops);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::deregister_descriptor(socket_type descriptor,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data, bool closing)
|
||||||
|
{
|
||||||
|
if (!descriptor_data)
|
||||||
|
return;
|
||||||
|
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
if (!descriptor_data->shutdown_)
|
||||||
|
{
|
||||||
|
if (closing)
|
||||||
|
{
|
||||||
|
// The descriptor will be automatically removed from the epoll set when
|
||||||
|
// it is closed.
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_DEL, descriptor, &ev);
|
||||||
|
}
|
||||||
|
|
||||||
|
op_queue<operation> ops;
|
||||||
|
for (int i = 0; i < max_ops; ++i)
|
||||||
|
{
|
||||||
|
while (reactor_op* op = descriptor_data->op_queue_[i].front())
|
||||||
|
{
|
||||||
|
op->ec_ = boost::asio::error::operation_aborted;
|
||||||
|
descriptor_data->op_queue_[i].pop();
|
||||||
|
ops.push(op);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
descriptor_data->descriptor_ = -1;
|
||||||
|
descriptor_data->shutdown_ = true;
|
||||||
|
|
||||||
|
descriptor_lock.unlock();
|
||||||
|
|
||||||
|
free_descriptor_state(descriptor_data);
|
||||||
|
descriptor_data = 0;
|
||||||
|
|
||||||
|
io_service_.post_deferred_completions(ops);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::deregister_internal_descriptor(socket_type descriptor,
|
||||||
|
epoll_reactor::per_descriptor_data& descriptor_data)
|
||||||
|
{
|
||||||
|
if (!descriptor_data)
|
||||||
|
return;
|
||||||
|
|
||||||
|
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
|
||||||
|
|
||||||
|
if (!descriptor_data->shutdown_)
|
||||||
|
{
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_DEL, descriptor, &ev);
|
||||||
|
|
||||||
|
op_queue<operation> ops;
|
||||||
|
for (int i = 0; i < max_ops; ++i)
|
||||||
|
ops.push(descriptor_data->op_queue_[i]);
|
||||||
|
|
||||||
|
descriptor_data->descriptor_ = -1;
|
||||||
|
descriptor_data->shutdown_ = true;
|
||||||
|
|
||||||
|
descriptor_lock.unlock();
|
||||||
|
|
||||||
|
free_descriptor_state(descriptor_data);
|
||||||
|
descriptor_data = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::run(bool block, op_queue<operation>& ops)
|
||||||
|
{
|
||||||
|
// This code relies on the fact that the task_io_service queues the reactor
|
||||||
|
// task behind all descriptor operations generated by this function. This
|
||||||
|
// means, that by the time we reach this point, any previously returned
|
||||||
|
// descriptor operations have already been dequeued. Therefore it is now safe
|
||||||
|
// for us to reuse and return them for the task_io_service to queue again.
|
||||||
|
|
||||||
|
// Calculate a timeout only if timerfd is not used.
|
||||||
|
int timeout;
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
timeout = block ? -1 : 0;
|
||||||
|
else
|
||||||
|
{
|
||||||
|
mutex::scoped_lock lock(mutex_);
|
||||||
|
timeout = block ? get_timeout() : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Block on the epoll descriptor.
|
||||||
|
epoll_event events[128];
|
||||||
|
int num_events = epoll_wait(epoll_fd_, events, 128, timeout);
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
bool check_timers = (timer_fd_ == -1);
|
||||||
|
#else // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
bool check_timers = true;
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
|
||||||
|
// Dispatch the waiting events.
|
||||||
|
for (int i = 0; i < num_events; ++i)
|
||||||
|
{
|
||||||
|
void* ptr = events[i].data.ptr;
|
||||||
|
if (ptr == &interrupter_)
|
||||||
|
{
|
||||||
|
// No need to reset the interrupter since we're leaving the descriptor
|
||||||
|
// in a ready-to-read state and relying on edge-triggered notifications
|
||||||
|
// to make it so that we only get woken up when the descriptor's epoll
|
||||||
|
// registration is updated.
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
if (timer_fd_ == -1)
|
||||||
|
check_timers = true;
|
||||||
|
#else // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
check_timers = true;
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
}
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
else if (ptr == &timer_fd_)
|
||||||
|
{
|
||||||
|
check_timers = true;
|
||||||
|
}
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// The descriptor operation doesn't count as work in and of itself, so we
|
||||||
|
// don't call work_started() here. This still allows the io_service to
|
||||||
|
// stop if the only remaining operations are descriptor operations.
|
||||||
|
descriptor_state* descriptor_data = static_cast<descriptor_state*>(ptr);
|
||||||
|
descriptor_data->set_ready_events(events[i].events);
|
||||||
|
ops.push(descriptor_data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (check_timers)
|
||||||
|
{
|
||||||
|
mutex::scoped_lock common_lock(mutex_);
|
||||||
|
timer_queues_.get_ready_timers(ops);
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
{
|
||||||
|
itimerspec new_timeout;
|
||||||
|
itimerspec old_timeout;
|
||||||
|
int flags = get_timeout(new_timeout);
|
||||||
|
timerfd_settime(timer_fd_, flags, &new_timeout, &old_timeout);
|
||||||
|
}
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::interrupt()
|
||||||
|
{
|
||||||
|
epoll_event ev = { 0, { 0 } };
|
||||||
|
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
|
||||||
|
ev.data.ptr = &interrupter_;
|
||||||
|
epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, interrupter_.read_descriptor(), &ev);
|
||||||
|
}
|
||||||
|
|
||||||
|
int epoll_reactor::do_epoll_create()
|
||||||
|
{
|
||||||
|
#if defined(EPOLL_CLOEXEC)
|
||||||
|
int fd = epoll_create1(EPOLL_CLOEXEC);
|
||||||
|
#else // defined(EPOLL_CLOEXEC)
|
||||||
|
int fd = -1;
|
||||||
|
errno = EINVAL;
|
||||||
|
#endif // defined(EPOLL_CLOEXEC)
|
||||||
|
|
||||||
|
if (fd == -1 && (errno == EINVAL || errno == ENOSYS))
|
||||||
|
{
|
||||||
|
fd = epoll_create(epoll_size);
|
||||||
|
if (fd != -1)
|
||||||
|
::fcntl(fd, F_SETFD, FD_CLOEXEC);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fd == -1)
|
||||||
|
{
|
||||||
|
boost::system::error_code ec(errno,
|
||||||
|
boost::asio::error::get_system_category());
|
||||||
|
boost::asio::detail::throw_error(ec, "epoll");
|
||||||
|
}
|
||||||
|
|
||||||
|
return fd;
|
||||||
|
}
|
||||||
|
|
||||||
|
int epoll_reactor::do_timerfd_create()
|
||||||
|
{
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
# if defined(TFD_CLOEXEC)
|
||||||
|
int fd = timerfd_create(CLOCK_MONOTONIC, TFD_CLOEXEC);
|
||||||
|
# else // defined(TFD_CLOEXEC)
|
||||||
|
int fd = -1;
|
||||||
|
errno = EINVAL;
|
||||||
|
# endif // defined(TFD_CLOEXEC)
|
||||||
|
|
||||||
|
if (fd == -1 && errno == EINVAL)
|
||||||
|
{
|
||||||
|
fd = timerfd_create(CLOCK_MONOTONIC, 0);
|
||||||
|
if (fd != -1)
|
||||||
|
::fcntl(fd, F_SETFD, FD_CLOEXEC);
|
||||||
|
}
|
||||||
|
|
||||||
|
return fd;
|
||||||
|
#else // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
return -1;
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_reactor::descriptor_state* epoll_reactor::allocate_descriptor_state()
|
||||||
|
{
|
||||||
|
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
|
||||||
|
return registered_descriptors_.alloc();
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::free_descriptor_state(epoll_reactor::descriptor_state* s)
|
||||||
|
{
|
||||||
|
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
|
||||||
|
registered_descriptors_.free(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::do_add_timer_queue(timer_queue_base& queue)
|
||||||
|
{
|
||||||
|
mutex::scoped_lock lock(mutex_);
|
||||||
|
timer_queues_.insert(&queue);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::do_remove_timer_queue(timer_queue_base& queue)
|
||||||
|
{
|
||||||
|
mutex::scoped_lock lock(mutex_);
|
||||||
|
timer_queues_.erase(&queue);
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::update_timeout()
|
||||||
|
{
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
if (timer_fd_ != -1)
|
||||||
|
{
|
||||||
|
itimerspec new_timeout;
|
||||||
|
itimerspec old_timeout;
|
||||||
|
int flags = get_timeout(new_timeout);
|
||||||
|
timerfd_settime(timer_fd_, flags, &new_timeout, &old_timeout);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
interrupt();
|
||||||
|
}
|
||||||
|
|
||||||
|
int epoll_reactor::get_timeout()
|
||||||
|
{
|
||||||
|
// By default we will wait no longer than 5 minutes. This will ensure that
|
||||||
|
// any changes to the system clock are detected after no longer than this.
|
||||||
|
return timer_queues_.wait_duration_msec(5 * 60 * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
#if defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
int epoll_reactor::get_timeout(itimerspec& ts)
|
||||||
|
{
|
||||||
|
ts.it_interval.tv_sec = 0;
|
||||||
|
ts.it_interval.tv_nsec = 0;
|
||||||
|
|
||||||
|
long usec = timer_queues_.wait_duration_usec(5 * 60 * 1000 * 1000);
|
||||||
|
ts.it_value.tv_sec = usec / 1000000;
|
||||||
|
ts.it_value.tv_nsec = usec ? (usec % 1000000) * 1000 : 1;
|
||||||
|
|
||||||
|
return usec ? 0 : TFD_TIMER_ABSTIME;
|
||||||
|
}
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
|
||||||
|
|
||||||
|
struct epoll_reactor::perform_io_cleanup_on_block_exit
|
||||||
|
{
|
||||||
|
explicit perform_io_cleanup_on_block_exit(epoll_reactor* r)
|
||||||
|
: reactor_(r), first_op_(0)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
~perform_io_cleanup_on_block_exit()
|
||||||
|
{
|
||||||
|
if (first_op_)
|
||||||
|
{
|
||||||
|
// Post the remaining completed operations for invocation.
|
||||||
|
if (!ops_.empty())
|
||||||
|
reactor_->io_service_.post_deferred_completions(ops_);
|
||||||
|
|
||||||
|
// A user-initiated operation has completed, but there's no need to
|
||||||
|
// explicitly call work_finished() here. Instead, we'll take advantage of
|
||||||
|
// the fact that the task_io_service will call work_finished() once we
|
||||||
|
// return.
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// No user-initiated operations have completed, so we need to compensate
|
||||||
|
// for the work_finished() call that the task_io_service will make once
|
||||||
|
// this operation returns.
|
||||||
|
reactor_->io_service_.work_started();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
epoll_reactor* reactor_;
|
||||||
|
op_queue<operation> ops_;
|
||||||
|
operation* first_op_;
|
||||||
|
};
|
||||||
|
|
||||||
|
epoll_reactor::descriptor_state::descriptor_state()
|
||||||
|
: operation(&epoll_reactor::descriptor_state::do_complete)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
operation* epoll_reactor::descriptor_state::perform_io(uint32_t events)
|
||||||
|
{
|
||||||
|
mutex_.lock();
|
||||||
|
perform_io_cleanup_on_block_exit io_cleanup(reactor_);
|
||||||
|
mutex::scoped_lock descriptor_lock(mutex_, mutex::scoped_lock::adopt_lock);
|
||||||
|
|
||||||
|
// Exception operations must be processed first to ensure that any
|
||||||
|
// out-of-band data is read before normal data.
|
||||||
|
static const int flag[max_ops] = { EPOLLIN, EPOLLOUT, EPOLLPRI };
|
||||||
|
for (int j = max_ops - 1; j >= 0; --j)
|
||||||
|
{
|
||||||
|
if (events & (flag[j] | EPOLLERR | EPOLLHUP))
|
||||||
|
{
|
||||||
|
while (reactor_op* op = op_queue_[j].front())
|
||||||
|
{
|
||||||
|
if (op->perform())
|
||||||
|
{
|
||||||
|
op_queue_[j].pop();
|
||||||
|
io_cleanup.ops_.push(op);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The first operation will be returned for completion now. The others will
|
||||||
|
// be posted for later by the io_cleanup object's destructor.
|
||||||
|
io_cleanup.first_op_ = io_cleanup.ops_.front();
|
||||||
|
io_cleanup.ops_.pop();
|
||||||
|
return io_cleanup.first_op_;
|
||||||
|
}
|
||||||
|
|
||||||
|
void epoll_reactor::descriptor_state::do_complete(
|
||||||
|
io_service_impl* owner, operation* base,
|
||||||
|
const boost::system::error_code& ec, std::size_t bytes_transferred)
|
||||||
|
{
|
||||||
|
if (owner)
|
||||||
|
{
|
||||||
|
descriptor_state* descriptor_data = static_cast<descriptor_state*>(base);
|
||||||
|
uint32_t events = static_cast<uint32_t>(bytes_transferred);
|
||||||
|
if (operation* op = descriptor_data->perform_io(events))
|
||||||
|
{
|
||||||
|
op->complete(*owner, ec, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace detail
|
||||||
|
} // namespace asio
|
||||||
|
} // namespace boost
|
||||||
|
|
||||||
|
#include <boost/asio/detail/pop_options.hpp>
|
||||||
|
|
||||||
|
#endif // defined(BOOST_ASIO_HAS_EPOLL)
|
||||||
|
|
||||||
|
#endif // BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
|
||||||
76
samples/GAMS/transport.gms
Normal file
76
samples/GAMS/transport.gms
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
*Basic example of transport model from GAMS model library
|
||||||
|
|
||||||
|
$Title A Transportation Problem (TRNSPORT,SEQ=1)
|
||||||
|
$Ontext
|
||||||
|
|
||||||
|
This problem finds a least cost shipping schedule that meets
|
||||||
|
requirements at markets and supplies at factories.
|
||||||
|
|
||||||
|
|
||||||
|
Dantzig, G B, Chapter 3.3. In Linear Programming and Extensions.
|
||||||
|
Princeton University Press, Princeton, New Jersey, 1963.
|
||||||
|
|
||||||
|
This formulation is described in detail in:
|
||||||
|
Rosenthal, R E, Chapter 2: A GAMS Tutorial. In GAMS: A User's Guide.
|
||||||
|
The Scientific Press, Redwood City, California, 1988.
|
||||||
|
|
||||||
|
The line numbers will not match those in the book because of these
|
||||||
|
comments.
|
||||||
|
|
||||||
|
$Offtext
|
||||||
|
|
||||||
|
|
||||||
|
Sets
|
||||||
|
i canning plants / seattle, san-diego /
|
||||||
|
j markets / new-york, chicago, topeka / ;
|
||||||
|
Parameters
|
||||||
|
a(i) capacity of plant i in cases
|
||||||
|
/ seattle 350
|
||||||
|
san-diego 600 /
|
||||||
|
b(j) demand at market j in cases
|
||||||
|
/ new-york 325
|
||||||
|
chicago 300
|
||||||
|
topeka 275 / ;
|
||||||
|
Table d(i,j) distance in thousands of miles
|
||||||
|
new-york chicago topeka
|
||||||
|
seattle 2.5 1.7 1.8
|
||||||
|
san-diego 2.5 1.8 1.4 ;
|
||||||
|
Scalar f freight in dollars per case per thousand miles /90/ ;
|
||||||
|
Parameter c(i,j) transport cost in thousands of dollars per case ;
|
||||||
|
c(i,j) = f * d(i,j) / 1000 ;
|
||||||
|
Variables
|
||||||
|
x(i,j) shipment quantities in cases
|
||||||
|
z total transportation costs in thousands of dollars ;
|
||||||
|
|
||||||
|
Positive Variable x ;
|
||||||
|
|
||||||
|
Equations
|
||||||
|
cost define objective function
|
||||||
|
supply(i) observe supply limit at plant i
|
||||||
|
demand(j) satisfy demand at market j ;
|
||||||
|
|
||||||
|
cost .. z =e= sum((i,j), c(i,j)*x(i,j)) ;
|
||||||
|
|
||||||
|
supply(i) .. sum(j, x(i,j)) =l= a(i) ;
|
||||||
|
|
||||||
|
demand(j) .. sum(i, x(i,j)) =g= b(j) ;
|
||||||
|
|
||||||
|
Model transport /all/ ;
|
||||||
|
|
||||||
|
Solve transport using lp minimizing z ;
|
||||||
|
|
||||||
|
Display x.l, x.m ;
|
||||||
|
|
||||||
|
$ontext
|
||||||
|
#user model library stuff
|
||||||
|
Main topic Basic GAMS
|
||||||
|
Featured item 1 Trnsport model
|
||||||
|
Featured item 2
|
||||||
|
Featured item 3
|
||||||
|
Featured item 4
|
||||||
|
Description
|
||||||
|
Basic example of transport model from GAMS model library
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
$offtext
|
||||||
9
samples/GLSL/SimpleLighting.gl2.frag
Normal file
9
samples/GLSL/SimpleLighting.gl2.frag
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
static const char* SimpleFragmentShader = STRINGIFY(
|
||||||
|
|
||||||
|
varying vec4 FrontColor;
|
||||||
|
|
||||||
|
void main(void)
|
||||||
|
{
|
||||||
|
gl_FragColor = FrontColor;
|
||||||
|
}
|
||||||
|
);
|
||||||
48
samples/GLSL/recurse1.frag
Normal file
48
samples/GLSL/recurse1.frag
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
#version 330 core
|
||||||
|
|
||||||
|
// cross-unit recursion
|
||||||
|
|
||||||
|
void main() {}
|
||||||
|
|
||||||
|
// two-level recursion
|
||||||
|
|
||||||
|
float cbar(int);
|
||||||
|
|
||||||
|
void cfoo(float)
|
||||||
|
{
|
||||||
|
cbar(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
// four-level, out of order
|
||||||
|
|
||||||
|
void CB();
|
||||||
|
void CD();
|
||||||
|
void CA() { CB(); }
|
||||||
|
void CC() { CD(); }
|
||||||
|
|
||||||
|
// high degree
|
||||||
|
|
||||||
|
void CBT();
|
||||||
|
void CDT();
|
||||||
|
void CAT() { CBT(); CBT(); CBT(); }
|
||||||
|
void CCT() { CDT(); CDT(); CBT(); }
|
||||||
|
|
||||||
|
// not recursive
|
||||||
|
|
||||||
|
void norA() {}
|
||||||
|
void norB() { norA(); }
|
||||||
|
void norC() { norA(); }
|
||||||
|
void norD() { norA(); }
|
||||||
|
void norE() { norB(); }
|
||||||
|
void norF() { norB(); }
|
||||||
|
void norG() { norE(); }
|
||||||
|
void norH() { norE(); }
|
||||||
|
void norI() { norE(); }
|
||||||
|
|
||||||
|
// not recursive, but with a call leading into a cycle if ignoring direction
|
||||||
|
|
||||||
|
void norcA() { }
|
||||||
|
void norcB() { norcA(); }
|
||||||
|
void norcC() { norcB(); }
|
||||||
|
void norcD() { norcC(); norcB(); } // head of cycle
|
||||||
|
void norcE() { norcD(); } // lead into cycle
|
||||||
2
samples/Groovy/script.gvy
Normal file
2
samples/Groovy/script.gvy
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env groovy
|
||||||
|
println "Hello World"
|
||||||
9
samples/Groovy/template.grt
Normal file
9
samples/Groovy/template.grt
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
html {
|
||||||
|
head {
|
||||||
|
component "bootstrap"
|
||||||
|
title "Bootstrap Template"
|
||||||
|
}
|
||||||
|
|
||||||
|
html {
|
||||||
|
}
|
||||||
|
}
|
||||||
9
samples/Groovy/template.gtpl
Normal file
9
samples/Groovy/template.gtpl
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
html {
|
||||||
|
head {
|
||||||
|
title "Example Template"
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
p "This is a quick template example"
|
||||||
|
}
|
||||||
|
}
|
||||||
7
samples/JavaScript/intro.js.frag
Normal file
7
samples/JavaScript/intro.js.frag
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
(function(window, angular) {
|
||||||
|
|
||||||
|
Array.prototype.last = function() {
|
||||||
|
return this[this.length-1];
|
||||||
|
};
|
||||||
|
|
||||||
|
var app = angular.module('ConwayGameOfLife', []);
|
||||||
3
samples/JavaScript/outro.js.frag
Normal file
3
samples/JavaScript/outro.js.frag
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
|
||||||
|
})(window, window.angular);
|
||||||
|
|
||||||
72
samples/Ox/IJCEmet2009.oxh
Normal file
72
samples/Ox/IJCEmet2009.oxh
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
/** Replicate Imai, Jain and Ching Econometrica 2009 (incomplete).
|
||||||
|
|
||||||
|
**/
|
||||||
|
#include "IJCEmet2009.h"
|
||||||
|
|
||||||
|
Kapital::Kapital(L,const N,const entrant,const exit,const KP){
|
||||||
|
StateVariable(L,N);
|
||||||
|
this.entrant = entrant;
|
||||||
|
this.exit = exit;
|
||||||
|
this.KP = KP;
|
||||||
|
actual = Kbar*vals/(N-1);
|
||||||
|
upper = log(actual~.Inf);
|
||||||
|
}
|
||||||
|
|
||||||
|
Kapital::Transit(FeasA) {
|
||||||
|
decl ent =CV(entrant), stayout = FeasA[][exit.pos], tprob, sigu = CV(KP[SigU]);
|
||||||
|
if (!v && !ent) return { <0>, ones(stayout) };
|
||||||
|
tprob = ent ? probn( (upper-CV(KP[Kbe]))/sigu )
|
||||||
|
: probn( (upper-(CV(KP[Kb0])+CV(KP[Kb2])*upper[v])) / sigu );
|
||||||
|
tprob = tprob[1:] - tprob[:N-1];
|
||||||
|
return { vals, tprob.*(1-stayout)+(1.0~zeros(1,N-1)).*stayout };
|
||||||
|
}
|
||||||
|
|
||||||
|
FirmEntry::Run() {
|
||||||
|
Initialize();
|
||||||
|
GenerateSample();
|
||||||
|
BDP->BayesianDP();
|
||||||
|
}
|
||||||
|
|
||||||
|
FirmEntry::Initialize() {
|
||||||
|
Rust::Initialize(Reachable,0);
|
||||||
|
sige = new StDeviations("sige",<0.3,0.3>,0);
|
||||||
|
entrant = new LaggedAction("entrant",d);
|
||||||
|
KP = new array[Kparams];
|
||||||
|
KP[Kbe] = new Positive("be",0.5);
|
||||||
|
KP[Kb0] = new Free("b0",0.0);
|
||||||
|
KP[Kb1] = new Determined("b1",0.0);
|
||||||
|
KP[Kb2] = new Positive("b2",0.4);
|
||||||
|
KP[SigU] = new Positive("sigu",0.4);
|
||||||
|
EndogenousStates(K = new Kapital("K",KN,entrant,d,KP),entrant);
|
||||||
|
SetDelta(new Probability("delta",0.85));
|
||||||
|
kcoef = new Positive("kcoef",0.1);
|
||||||
|
ecost = new Negative("ec",-0.4);
|
||||||
|
CreateSpaces();
|
||||||
|
}
|
||||||
|
|
||||||
|
FirmEntry::GenerateSample() {
|
||||||
|
Volume = LOUD;
|
||||||
|
EM = new ValueIteration(0);
|
||||||
|
// EM -> Solve(0,0);
|
||||||
|
data = new DataSet(0,EM);
|
||||||
|
data->Simulate(DataN,DataT,0,FALSE);
|
||||||
|
data->Print("firmentry.xls");
|
||||||
|
BDP = new ImaiJainChing("FMH",data,EM,ecost,sige,kcoef,KP,delta);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Capital stock can be positive only for incumbents.
|
||||||
|
**/
|
||||||
|
FirmEntry::Reachable() { return CV(entrant)*CV(K) ? 0 : new FirmEntry() ; }
|
||||||
|
|
||||||
|
/** The one period return.
|
||||||
|
<DD>
|
||||||
|
<pre>U = </pre>
|
||||||
|
</DD>
|
||||||
|
**/
|
||||||
|
FirmEntry::Utility() {
|
||||||
|
decl ent = CV(entrant),
|
||||||
|
u =
|
||||||
|
ent*CV(ecost)+(1-ent)*CV(kcoef)*AV(K)
|
||||||
|
| 0.0;
|
||||||
|
return u;
|
||||||
|
}
|
||||||
63
samples/Ox/ParallelObjective.ox
Normal file
63
samples/Ox/ParallelObjective.ox
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
/** Client and Server classes for parallel optimization using CFMPI.**/
|
||||||
|
#include "ParallelObjective.h"
|
||||||
|
|
||||||
|
/** Set up MPI Client-Server support for objective optimization.
|
||||||
|
@param obj `Objective' to parallelize
|
||||||
|
@param DONOTUSECLIENT TRUE (default): client node does no object evaluation<br>FALSE after putting servers to work Client node does one evaluation.
|
||||||
|
**/
|
||||||
|
ParallelObjective(obj,DONOTUSECLIENT) {
|
||||||
|
if (isclass(obj.p2p)) {oxwarning("P2P object already exists for "+obj.L+". Nothing changed"); return;}
|
||||||
|
obj.p2p = new P2P(DONOTUSECLIENT,new ObjClient(obj),new ObjServer(obj));
|
||||||
|
}
|
||||||
|
|
||||||
|
ObjClient::ObjClient(obj) { this.obj = obj; }
|
||||||
|
|
||||||
|
ObjClient::Execute() { }
|
||||||
|
|
||||||
|
ObjServer::ObjServer(obj) {
|
||||||
|
this.obj = obj;
|
||||||
|
basetag = P2P::STOP_TAG+1;
|
||||||
|
iml = obj.NvfuncTerms;
|
||||||
|
Nparams = obj.nstruct;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Wait on the objective client.
|
||||||
|
**/
|
||||||
|
ObjServer::Loop(nxtmsgsz) {
|
||||||
|
Nparams = nxtmsgsz; //free param length is no greater than Nparams
|
||||||
|
if (Volume>QUIET) println("ObjServer server ",ID," Nparams ",Nparams);
|
||||||
|
Server::Loop(Nparams);
|
||||||
|
Recv(ANY_TAG); //receive the ending parameter vector
|
||||||
|
obj->Encode(Buffer[:Nparams-1]); //encode it.
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Do the objective evaluation.
|
||||||
|
Receive structural parameter vector and `Objective::Encode`() it.
|
||||||
|
Call `Objective::vfunc`().
|
||||||
|
@return Nparams (max. length of next expected message);
|
||||||
|
**/
|
||||||
|
ObjServer::Execute() {
|
||||||
|
obj->Decode(Buffer[:obj.nfree-1]);
|
||||||
|
Buffer = obj.cur.V[] = obj->vfunc();
|
||||||
|
if (Volume>QUIET) println("Server Executive: ",ID," vfunc[0]= ",Buffer[0]);
|
||||||
|
return obj.nstruct;
|
||||||
|
}
|
||||||
|
|
||||||
|
CstrServer::CstrServer(obj) { ObjServer(obj); }
|
||||||
|
|
||||||
|
SepServer::SepServer(obj) { ObjServer(obj); }
|
||||||
|
|
||||||
|
CstrServer::Execute() {
|
||||||
|
obj->Encode(Buffer);
|
||||||
|
obj->Lagrangian(0);
|
||||||
|
return rows(Buffer = obj.cur->Vec());
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Separable objective evaluations.
|
||||||
|
**/
|
||||||
|
SepServer::Execute() {
|
||||||
|
obj.Kvar.v = imod(Tag-basetag,obj.K);
|
||||||
|
obj->Encode(Buffer,TRUE);
|
||||||
|
Buffer = obj.Kvar->PDF() * obj->vfunc();
|
||||||
|
return obj.NvfuncTerms;
|
||||||
|
}
|
||||||
38
samples/Ox/particle.oxo
Normal file
38
samples/Ox/particle.oxo
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
nldge::ParticleLogLikeli()
|
||||||
|
{ decl it, ip,
|
||||||
|
mss, mbas, ms, my, mx, vw, vwi, dws,
|
||||||
|
mhi, mhdet, loglikeli, mData,
|
||||||
|
vxm, vxs, mxm=<>, mxsu=<>, mxsl=<>,
|
||||||
|
time, timeall, timeran=0, timelik=0, timefun=0, timeint=0, timeres=0;
|
||||||
|
|
||||||
|
mData = GetData(m_asY);
|
||||||
|
mhdet = sqrt((2*M_PI)^m_cY * determinant(m_mMSbE.^2)); // covariance determinant
|
||||||
|
mhi = invert(m_mMSbE.^2); // invert covariance of measurement shocks
|
||||||
|
|
||||||
|
ms = m_vSss + zeros(m_cPar, m_cS); // start particles
|
||||||
|
mx = m_vXss + zeros(m_cPar, m_cX); // steady state of state and policy
|
||||||
|
|
||||||
|
loglikeli = 0; // init likelihood
|
||||||
|
//timeall=timer();
|
||||||
|
for(it = 0; it < sizer(mData); it++)
|
||||||
|
{
|
||||||
|
mss = rann(m_cPar, m_cSS) * m_mSSbE; // state noise
|
||||||
|
fg(&ms, ms, mx, mss); // transition prior as proposal
|
||||||
|
mx = m_oApprox.FastInterpolate(ms); // interpolate
|
||||||
|
fy(&my, ms, mx, zeros(m_cPar, m_cMS)); // evaluate importance weights
|
||||||
|
my -= mData[it][]; // observation error
|
||||||
|
|
||||||
|
vw = exp(-0.5 * outer(my,mhi,'d')' )/mhdet; // vw = exp(-0.5 * sumr(my*mhi .*my ) )/mhdet;
|
||||||
|
|
||||||
|
vw = vw .== .NaN .? 0 .: vw; // no policy can happen for extrem particles
|
||||||
|
dws = sumc(vw);
|
||||||
|
if(dws==0) return -.Inf; // or extremely wrong parameters
|
||||||
|
loglikeli += log(dws/m_cPar) ; // loglikelihood contribution
|
||||||
|
//timelik += (timer()-time)/100;
|
||||||
|
//time=timer();
|
||||||
|
vwi = resample(vw/dws)-1; // selection step in c++
|
||||||
|
ms = ms[vwi][]; // on normalized weights
|
||||||
|
mx = mx[vwi][];
|
||||||
|
}
|
||||||
|
return loglikeli;
|
||||||
|
}
|
||||||
54
samples/Pan/test.pan
Normal file
54
samples/Pan/test.pan
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
object template pantest;
|
||||||
|
|
||||||
|
# Very simple pan test file
|
||||||
|
"/long/decimal" = 123;
|
||||||
|
"/long/octal" = 0755;
|
||||||
|
"/long/hexadecimal" = 0xFF;
|
||||||
|
|
||||||
|
"/double/simple" = 0.01;
|
||||||
|
"/double/pi" = 3.14159;
|
||||||
|
"/double/exponent" = 1e-8;
|
||||||
|
"/double/scientific" = 1.3E10;
|
||||||
|
|
||||||
|
"/string/single" = 'Faster, but escapes like \t, \n and \x3d don''t work, but '' should work.';
|
||||||
|
"/string/double" = "Slower, but escapes like \t, \n and \x3d do work";
|
||||||
|
|
||||||
|
variable TEST = 2;
|
||||||
|
|
||||||
|
"/x2" = to_string(TEST);
|
||||||
|
"/x2" ?= 'Default value';
|
||||||
|
|
||||||
|
"/x3" = 1 + 2 + value("/long/decimal");
|
||||||
|
|
||||||
|
"/x4" = undef;
|
||||||
|
|
||||||
|
"/x5" = null;
|
||||||
|
|
||||||
|
variable e ?= error("Test error message");
|
||||||
|
|
||||||
|
# include gmond config for services-monitoring
|
||||||
|
include { 'site/ganglia/gmond/services-monitoring' };
|
||||||
|
|
||||||
|
"/software/packages"=pkg_repl("httpd","2.2.3-43.sl5.3",PKG_ARCH_DEFAULT);
|
||||||
|
"/software/packages"=pkg_repl("php");
|
||||||
|
|
||||||
|
# Example function
|
||||||
|
function show_things_view_for_stuff = {
|
||||||
|
thing = ARGV[0];
|
||||||
|
foreach( i; mything; STUFF ) {
|
||||||
|
if ( thing == mything ) {
|
||||||
|
return( true );
|
||||||
|
} else {
|
||||||
|
return SELF;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
false;
|
||||||
|
};
|
||||||
|
|
||||||
|
variable HERE = <<EOF;
|
||||||
|
; This example demonstrates an in-line heredoc style config file
|
||||||
|
[main]
|
||||||
|
awesome = true
|
||||||
|
EOF
|
||||||
|
|
||||||
|
variable small = false;#This should be highlighted normally again.
|
||||||
25
samples/R/scholar.Rd
Normal file
25
samples/R/scholar.Rd
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
\docType{package}
|
||||||
|
\name{scholar}
|
||||||
|
\alias{scholar}
|
||||||
|
\alias{scholar-package}
|
||||||
|
\title{scholar}
|
||||||
|
\source{
|
||||||
|
The package reads data from
|
||||||
|
\url{http://scholar.google.com}. Dates and citation
|
||||||
|
counts are estimated and are determined automatically by
|
||||||
|
a computer program. Use at your own risk.
|
||||||
|
}
|
||||||
|
\description{
|
||||||
|
The \code{scholar} package provides functions to extract
|
||||||
|
citation data from Google Scholar. There are also
|
||||||
|
convenience functions for comparing multiple scholars and
|
||||||
|
predicting h-index scores based on past publication
|
||||||
|
records.
|
||||||
|
}
|
||||||
|
\note{
|
||||||
|
A complementary set of Google Scholar functions can be
|
||||||
|
found at
|
||||||
|
\url{http://biostat.jhsph.edu/~jleek/code/googleCite.r}.
|
||||||
|
The \code{scholar} package was developed independently.
|
||||||
|
}
|
||||||
|
|
||||||
17
samples/SAS/data.sas
Normal file
17
samples/SAS/data.sas
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
/* Example DATA step code for linguist */
|
||||||
|
|
||||||
|
libname source 'C:\path\to\file'
|
||||||
|
|
||||||
|
data work.working_copy;
|
||||||
|
set source.original_file.sas7bdat;
|
||||||
|
run;
|
||||||
|
|
||||||
|
data work.working_copy;
|
||||||
|
set work.working_copy;
|
||||||
|
if Purge = 1 then delete;
|
||||||
|
run;
|
||||||
|
|
||||||
|
data work.working_copy;
|
||||||
|
set work.working_copy;
|
||||||
|
if ImportantVariable = . then MissingFlag = 1;
|
||||||
|
run;
|
||||||
15
samples/SAS/proc.sas
Normal file
15
samples/SAS/proc.sas
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
/* PROC examples for Linguist */
|
||||||
|
|
||||||
|
proc surveyselect data=work.data out=work.boot method=urs reps=20000 seed=2156 sampsize=28 outhits;
|
||||||
|
samplingunit Site;
|
||||||
|
run;
|
||||||
|
|
||||||
|
PROC MI data=work.boot out=work.bootmi nimpute=30 seed=5686 round = 1;
|
||||||
|
By Replicate;
|
||||||
|
VAR Variable1 Variable2;
|
||||||
|
run;
|
||||||
|
|
||||||
|
proc logistic data=work.bootmi descending;
|
||||||
|
By Replicate _Imputation_;
|
||||||
|
model Outcome = Variable1 Variable2 / risklimits;
|
||||||
|
run;
|
||||||
4
samples/Swift/section-11.swift
Normal file
4
samples/Swift/section-11.swift
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
let apples = 3
|
||||||
|
let oranges = 5
|
||||||
|
let appleSummary = "I have \(apples) apples."
|
||||||
|
let fruitSummary = "I have \(apples + oranges) pieces of fruit."
|
||||||
8
samples/Swift/section-13.swift
Normal file
8
samples/Swift/section-13.swift
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
var shoppingList = ["catfish", "water", "tulips", "blue paint"]
|
||||||
|
shoppingList[1] = "bottle of water"
|
||||||
|
|
||||||
|
var occupations = [
|
||||||
|
"Malcolm": "Captain",
|
||||||
|
"Kaylee": "Mechanic",
|
||||||
|
]
|
||||||
|
occupations["Jayne"] = "Public Relations"
|
||||||
2
samples/Swift/section-15.swift
Normal file
2
samples/Swift/section-15.swift
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
let emptyArray = String[]()
|
||||||
|
let emptyDictionary = Dictionary<String, Float>()
|
||||||
1
samples/Swift/section-17.swift
Normal file
1
samples/Swift/section-17.swift
Normal file
@@ -0,0 +1 @@
|
|||||||
|
shoppingList = [] // Went shopping and bought everything.
|
||||||
10
samples/Swift/section-19.swift
Normal file
10
samples/Swift/section-19.swift
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
let individualScores = [75, 43, 103, 87, 12]
|
||||||
|
var teamScore = 0
|
||||||
|
for score in individualScores {
|
||||||
|
if score > 50 {
|
||||||
|
teamScore += 3
|
||||||
|
} else {
|
||||||
|
teamScore += 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
teamScore
|
||||||
8
samples/Swift/section-21.swift
Normal file
8
samples/Swift/section-21.swift
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
var optionalString: String? = "Hello"
|
||||||
|
optionalString == nil
|
||||||
|
|
||||||
|
var optionalName: String? = "John Appleseed"
|
||||||
|
var greeting = "Hello!"
|
||||||
|
if let name = optionalName {
|
||||||
|
greeting = "Hello, \(name)"
|
||||||
|
}
|
||||||
11
samples/Swift/section-23.swift
Normal file
11
samples/Swift/section-23.swift
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
let vegetable = "red pepper"
|
||||||
|
switch vegetable {
|
||||||
|
case "celery":
|
||||||
|
let vegetableComment = "Add some raisins and make ants on a log."
|
||||||
|
case "cucumber", "watercress":
|
||||||
|
let vegetableComment = "That would make a good tea sandwich."
|
||||||
|
case let x where x.hasSuffix("pepper"):
|
||||||
|
let vegetableComment = "Is it a spicy \(x)?"
|
||||||
|
default:
|
||||||
|
let vegetableComment = "Everything tastes good in soup."
|
||||||
|
}
|
||||||
14
samples/Swift/section-25.swift
Normal file
14
samples/Swift/section-25.swift
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
let interestingNumbers = [
|
||||||
|
"Prime": [2, 3, 5, 7, 11, 13],
|
||||||
|
"Fibonacci": [1, 1, 2, 3, 5, 8],
|
||||||
|
"Square": [1, 4, 9, 16, 25],
|
||||||
|
]
|
||||||
|
var largest = 0
|
||||||
|
for (kind, numbers) in interestingNumbers {
|
||||||
|
for number in numbers {
|
||||||
|
if number > largest {
|
||||||
|
largest = number
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
largest
|
||||||
11
samples/Swift/section-27.swift
Normal file
11
samples/Swift/section-27.swift
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
var n = 2
|
||||||
|
while n < 100 {
|
||||||
|
n = n * 2
|
||||||
|
}
|
||||||
|
n
|
||||||
|
|
||||||
|
var m = 2
|
||||||
|
do {
|
||||||
|
m = m * 2
|
||||||
|
} while m < 100
|
||||||
|
m
|
||||||
11
samples/Swift/section-29.swift
Normal file
11
samples/Swift/section-29.swift
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
var firstForLoop = 0
|
||||||
|
for i in 0..3 {
|
||||||
|
firstForLoop += i
|
||||||
|
}
|
||||||
|
firstForLoop
|
||||||
|
|
||||||
|
var secondForLoop = 0
|
||||||
|
for var i = 0; i < 3; ++i {
|
||||||
|
secondForLoop += 1
|
||||||
|
}
|
||||||
|
secondForLoop
|
||||||
1
samples/Swift/section-3.swift
Normal file
1
samples/Swift/section-3.swift
Normal file
@@ -0,0 +1 @@
|
|||||||
|
println("Hello, world")
|
||||||
4
samples/Swift/section-31.swift
Normal file
4
samples/Swift/section-31.swift
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
func greet(name: String, day: String) -> String {
|
||||||
|
return "Hello \(name), today is \(day)."
|
||||||
|
}
|
||||||
|
greet("Bob", "Tuesday")
|
||||||
4
samples/Swift/section-33.swift
Normal file
4
samples/Swift/section-33.swift
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
func getGasPrices() -> (Double, Double, Double) {
|
||||||
|
return (3.59, 3.69, 3.79)
|
||||||
|
}
|
||||||
|
getGasPrices()
|
||||||
9
samples/Swift/section-35.swift
Normal file
9
samples/Swift/section-35.swift
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
func sumOf(numbers: Int...) -> Int {
|
||||||
|
var sum = 0
|
||||||
|
for number in numbers {
|
||||||
|
sum += number
|
||||||
|
}
|
||||||
|
return sum
|
||||||
|
}
|
||||||
|
sumOf()
|
||||||
|
sumOf(42, 597, 12)
|
||||||
9
samples/Swift/section-37.swift
Normal file
9
samples/Swift/section-37.swift
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
func returnFifteen() -> Int {
|
||||||
|
var y = 10
|
||||||
|
func add() {
|
||||||
|
y += 5
|
||||||
|
}
|
||||||
|
add()
|
||||||
|
return y
|
||||||
|
}
|
||||||
|
returnFifteen()
|
||||||
8
samples/Swift/section-39.swift
Normal file
8
samples/Swift/section-39.swift
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
func makeIncrementer() -> (Int -> Int) {
|
||||||
|
func addOne(number: Int) -> Int {
|
||||||
|
return 1 + number
|
||||||
|
}
|
||||||
|
return addOne
|
||||||
|
}
|
||||||
|
var increment = makeIncrementer()
|
||||||
|
increment(7)
|
||||||
13
samples/Swift/section-41.swift
Normal file
13
samples/Swift/section-41.swift
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
func hasAnyMatches(list: Int[], condition: Int -> Bool) -> Bool {
|
||||||
|
for item in list {
|
||||||
|
if condition(item) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
func lessThanTen(number: Int) -> Bool {
|
||||||
|
return number < 10
|
||||||
|
}
|
||||||
|
var numbers = [20, 19, 7, 12]
|
||||||
|
hasAnyMatches(numbers, lessThanTen)
|
||||||
5  samples/Swift/section-43.swift  Normal file
@@ -0,0 +1,5 @@
numbers.map({
    (number: Int) -> Int in
    let result = 3 * number
    return result
})
1  samples/Swift/section-45.swift  Normal file
@@ -0,0 +1 @@
numbers.map({ number in 3 * number })
1  samples/Swift/section-47.swift  Normal file
@@ -0,0 +1 @@
sort([1, 5, 3, 12, 2]) { $0 > $1 }
6  samples/Swift/section-49.swift  Normal file
@@ -0,0 +1,6 @@
class Shape {
    var numberOfSides = 0
    func simpleDescription() -> String {
        return "A shape with \(numberOfSides) sides."
    }
}
3  samples/Swift/section-5.swift  Normal file
@@ -0,0 +1,3 @@
var myVariable = 42
myVariable = 50
let myConstant = 42
3  samples/Swift/section-51.swift  Normal file
@@ -0,0 +1,3 @@
var shape = Shape()
shape.numberOfSides = 7
var shapeDescription = shape.simpleDescription()
12  samples/Swift/section-53.swift  Normal file
@@ -0,0 +1,12 @@
class NamedShape {
    var numberOfSides: Int = 0
    var name: String

    init(name: String) {
        self.name = name
    }

    func simpleDescription() -> String {
        return "A shape with \(numberOfSides) sides."
    }
}
20  samples/Swift/section-55.swift  Normal file
@@ -0,0 +1,20 @@
class Square: NamedShape {
    var sideLength: Double

    init(sideLength: Double, name: String) {
        self.sideLength = sideLength
        super.init(name: name)
        numberOfSides = 4
    }

    func area() -> Double {
        return sideLength * sideLength
    }

    override func simpleDescription() -> String {
        return "A square with sides of length \(sideLength)."
    }
}
let test = Square(sideLength: 5.2, name: "my test square")
test.area()
test.simpleDescription()
26  samples/Swift/section-57.swift  Normal file
@@ -0,0 +1,26 @@
class EquilateralTriangle: NamedShape {
    var sideLength: Double = 0.0

    init(sideLength: Double, name: String) {
        self.sideLength = sideLength
        super.init(name: name)
        numberOfSides = 3
    }

    var perimeter: Double {
        get {
            return 3.0 * sideLength
        }
        set {
            sideLength = newValue / 3.0
        }
    }

    override func simpleDescription() -> String {
        return "An equilateral triagle with sides of length \(sideLength)."
    }
}
var triangle = EquilateralTriangle(sideLength: 3.1, name: "a triangle")
triangle.perimeter
triangle.perimeter = 9.9
triangle.sideLength
21  samples/Swift/section-59.swift  Normal file
@@ -0,0 +1,21 @@
class TriangleAndSquare {
    var triangle: EquilateralTriangle {
        willSet {
            square.sideLength = newValue.sideLength
        }
    }
    var square: Square {
        willSet {
            triangle.sideLength = newValue.sideLength
        }
    }
    init(size: Double, name: String) {
        square = Square(sideLength: size, name: name)
        triangle = EquilateralTriangle(sideLength: size, name: name)
    }
}
var triangleAndSquare = TriangleAndSquare(size: 10, name: "another test shape")
triangleAndSquare.square.sideLength
triangleAndSquare.triangle.sideLength
triangleAndSquare.square = Square(sideLength: 50, name: "larger square")
triangleAndSquare.triangle.sideLength
8  samples/Swift/section-61.swift  Normal file
@@ -0,0 +1,8 @@
class Counter {
    var count: Int = 0
    func incrementBy(amount: Int, numberOfTimes times: Int) {
        count += amount * times
    }
}
var counter = Counter()
counter.incrementBy(2, numberOfTimes: 7)
2  samples/Swift/section-63.swift  Normal file
@@ -0,0 +1,2 @@
let optionalSquare: Square? = Square(sideLength: 2.5, name: "optional square")
let sideLength = optionalSquare?.sideLength
21  samples/Swift/section-65.swift  Normal file
@@ -0,0 +1,21 @@
enum Rank: Int {
    case Ace = 1
    case Two, Three, Four, Five, Six, Seven, Eight, Nine, Ten
    case Jack, Queen, King
    func simpleDescription() -> String {
        switch self {
        case .Ace:
            return "ace"
        case .Jack:
            return "jack"
        case .Queen:
            return "queen"
        case .King:
            return "king"
        default:
            return String(self.toRaw())
        }
    }
}
let ace = Rank.Ace
let aceRawValue = ace.toRaw()
3  samples/Swift/section-67.swift  Normal file
@@ -0,0 +1,3 @@
if let convertedRank = Rank.fromRaw(3) {
    let threeDescription = convertedRank.simpleDescription()
}
17  samples/Swift/section-69.swift  Normal file
@@ -0,0 +1,17 @@
enum Suit {
    case Spades, Hearts, Diamonds, Clubs
    func simpleDescription() -> String {
        switch self {
        case .Spades:
            return "spades"
        case .Hearts:
            return "hearts"
        case .Diamonds:
            return "diamonds"
        case .Clubs:
            return "clubs"
        }
    }
}
let hearts = Suit.Hearts
let heartsDescription = hearts.simpleDescription()
3  samples/Swift/section-7.swift  Normal file
@@ -0,0 +1,3 @@
let implicitInteger = 70
let implicitDouble = 70.0
let explicitDouble: Double = 70
9  samples/Swift/section-71.swift  Normal file
@@ -0,0 +1,9 @@
struct Card {
    var rank: Rank
    var suit: Suit
    func simpleDescription() -> String {
        return "The \(rank.simpleDescription()) of \(suit.simpleDescription())"
    }
}
let threeOfSpades = Card(rank: .Three, suit: .Spades)
let threeOfSpadesDescription = threeOfSpades.simpleDescription()
14  samples/Swift/section-73.swift  Normal file
@@ -0,0 +1,14 @@
enum ServerResponse {
    case Result(String, String)
    case Error(String)
}

let success = ServerResponse.Result("6:00 am", "8:09 pm")
let failure = ServerResponse.Error("Out of cheese.")

switch success {
case let .Result(sunrise, sunset):
    let serverResponse = "Sunrise is at \(sunrise) and sunset is at \(sunset)."
case let .Error(error):
    let serverResponse = "Failure... \(error)"
}
4  samples/Swift/section-75.swift  Normal file
@@ -0,0 +1,4 @@
protocol ExampleProtocol {
    var simpleDescription: String { get }
    mutating func adjust()
}
20  samples/Swift/section-77.swift  Normal file
@@ -0,0 +1,20 @@
class SimpleClass: ExampleProtocol {
    var simpleDescription: String = "A very simple class."
    var anotherProperty: Int = 69105
    func adjust() {
        simpleDescription += " Now 100% adjusted."
    }
}
var a = SimpleClass()
a.adjust()
let aDescription = a.simpleDescription

struct SimpleStructure: ExampleProtocol {
    var simpleDescription: String = "A simple structure"
    mutating func adjust() {
        simpleDescription += " (adjusted)"
    }
}
var b = SimpleStructure()
b.adjust()
let bDescription = b.simpleDescription
9  samples/Swift/section-79.swift  Normal file
@@ -0,0 +1,9 @@
extension Int: ExampleProtocol {
    var simpleDescription: String {
        return "The number \(self)"
    }
    mutating func adjust() {
        self += 42
    }
}
7.simpleDescription
3  samples/Swift/section-81.swift  Normal file
@@ -0,0 +1,3 @@
let protocolValue: ExampleProtocol = a
protocolValue.simpleDescription
// protocolValue.anotherProperty // Uncomment to see the error
8  samples/Swift/section-83.swift  Normal file
@@ -0,0 +1,8 @@
func repeat<ItemType>(item: ItemType, times: Int) -> ItemType[] {
    var result = ItemType[]()
    for i in 0..times {
        result += item
    }
    return result
}
repeat("knock", 4)
7  samples/Swift/section-85.swift  Normal file
@@ -0,0 +1,7 @@
// Reimplement the Swift standard library's optional type
enum OptionalValue<T> {
    case None
    case Some(T)
}
var possibleInteger: OptionalValue<Int> = .None
possibleInteger = .Some(100)
11  samples/Swift/section-87.swift  Normal file
@@ -0,0 +1,11 @@
func anyCommonElements <T, U where T: Sequence, U: Sequence, T.GeneratorType.Element: Equatable, T.GeneratorType.Element == U.GeneratorType.Element> (lhs: T, rhs: U) -> Bool {
    for lhsItem in lhs {
        for rhsItem in rhs {
            if lhsItem == rhsItem {
                return true
            }
        }
    }
    return false
}
anyCommonElements([1, 2, 3], [3])
3  samples/Swift/section-9.swift  Normal file
@@ -0,0 +1,3 @@
let label = "The width is "
let width = 94
let widthLabel = label + String(width)
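The Swift files above are vendored language samples that feed Linguist's classifier. As a rough sanity check (not part of this change), a vendored sample can be pushed through Linguist's public `FileBlob` API; the expected result assumes a Swift entry already exists in `languages.yml`:

```ruby
require 'linguist'

# Illustrative only: classify one of the new samples via the existing API.
blob = Linguist::FileBlob.new("samples/Swift/section-3.swift")
puts blob.language && blob.language.name  # expected: "Swift"
```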
1  samples/Text/iso8859-8-i.txt  Normal file
@@ -0,0 +1 @@
%<25><><EFBFBD>
BIN  samples/Text/utf16le-windows.txt  Normal file
Binary file not shown.
BIN  samples/Text/utf16le.txt  Normal file
Binary file not shown.
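The UTF-16LE and ISO-8859-8 fixtures added here exist to exercise encoding handling. For illustration only, this is roughly how a detected encoding can be inspected with the charlock_holmes gem that Linguist relies on (paths are the fixtures introduced in this diff):

```ruby
require 'charlock_holmes'

# Detect the encoding of one of the new fixtures from its raw bytes.
content   = File.binread("samples/Text/utf16le.txt")
detection = CharlockHolmes::EncodingDetector.detect(content)
puts detection[:encoding]  # expected to report a UTF-16 variant for this file
```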
21  samples/Zimpl/sample.zmpl  Normal file
@@ -0,0 +1,21 @@
# $Id: queens3.zpl,v 1.3 2009/09/13 16:15:53 bzfkocht Exp $
#
# This is a formulation of the n queens problem using binary variables.
# variables. Since the number of queens is maximized, the size of the
# board can be set arbitrarily.
#
param columns := 8;

set I := { 1 .. columns };
set IxI := I * I;

set TABU[<i,j> in IxI] := { <m,n> in IxI with
   (m != i or n != j) and (m == i or n == j or abs(m - i) == abs(n - j)) };

var x[IxI] binary;

maximize queens: sum <i,j> in IxI : x[i,j];

subto c1: forall <i,j> in IxI do
   card(TABU[i,j]) - card(TABU[i,j]) * x[i,j] >= sum <m,n> in TABU[i,j] : x[m,n];

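Read as a mixed-integer program, the Zimpl sample maximizes the number of queens placed while the `TABU` constraint keeps every attacked square empty. In conventional notation (mine, not part of the sample file), the `c1` constraint is the equivalent bound:

$$
\max \sum_{(i,j)\in I\times I} x_{ij}
\quad\text{s.t.}\quad
\sum_{(m,n)\in \mathrm{TABU}_{ij}} x_{mn} \;\le\; \lvert \mathrm{TABU}_{ij}\rvert\,(1 - x_{ij})
\quad \forall (i,j)\in I\times I,
\qquad x_{ij}\in\{0,1\}.
$$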
@@ -11,6 +11,17 @@ class TestBlob < Test::Unit::TestCase
 
   Lexer = Pygments::Lexer
 
+  def setup
+    # git blobs are normally loaded as ASCII-8BIT since they may contain data
+    # with arbitrary encoding not known ahead of time
+    @original_external = Encoding.default_external
+    Encoding.default_external = Encoding.find("ASCII-8BIT")
+  end
+
+  def teardown
+    Encoding.default_external = @original_external
+  end
+
   def samples_path
     File.expand_path("../../samples", __FILE__)
   end
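The new setup/teardown pair makes the tests read fixtures the way git blob data normally arrives: as untagged bytes. A minimal illustration of the effect (the path is just an example):

```ruby
# With the external encoding forced to ASCII-8BIT, strings read from disk are
# tagged as raw bytes rather than UTF-8, mirroring real blob data.
Encoding.default_external = Encoding.find("ASCII-8BIT")
data = File.read("samples/Text/utf16le.txt")
puts data.encoding  # => ASCII-8BIT
```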
@@ -67,6 +78,14 @@ class TestBlob < Test::Unit::TestCase
     assert_equal 475, blob("Emacs Lisp/ess-julia.el").lines.length
   end
 
+  def test_lines_maintains_original_encoding
+    # Even if the file's encoding is detected as something like UTF-16LE,
+    # earlier versions of the gem made implicit guarantees that the encoding of
+    # each `line` is in the same encoding as the file was originally read (in
+    # practice, UTF-8 or ASCII-8BIT)
+    assert_equal Encoding.default_external, blob("Text/utf16le.txt").lines.first.encoding
+  end
+
   def test_size
     assert_equal 15, blob("Ruby/foo.rb").size
   end
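What this new test pins down is that `Blob#lines` must not re-tag the strings it returns. A minimal sketch of one way to satisfy that, in the spirit of the accompanying `blob.rb` change (the method and argument names here are illustrative, not the gem's API):

```ruby
# Split raw blob data on line breaks without changing its encoding tag:
# express each newline sequence in the detected encoding, reinterpret those
# bytes in the data's own (usually ASCII-8BIT) encoding, and split on them.
def split_lines(data, detected_encoding)
  encoded_newlines = ["\r\n", "\r", "\n"].map do |nl|
    nl.encode(detected_encoding, "ASCII-8BIT").force_encoding(data.encoding)
  end
  data.split(Regexp.union(encoded_newlines), -1)
end
```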
@@ -77,12 +96,16 @@ class TestBlob < Test::Unit::TestCase
 
   def test_sloc
     assert_equal 2, blob("Ruby/foo.rb").sloc
+    assert_equal 3, blob("Text/utf16le-windows.txt").sloc
+    assert_equal 1, blob("Text/iso8859-8-i.txt").sloc
   end
 
   def test_encoding
     assert_equal "ISO-8859-2", blob("Text/README").encoding
     assert_equal "ISO-8859-1", blob("Text/dump.sql").encoding
     assert_equal "UTF-8", blob("Text/foo.txt").encoding
+    assert_equal "UTF-16LE", blob("Text/utf16le.txt").encoding
+    assert_equal "UTF-16LE", blob("Text/utf16le-windows.txt").encoding
     assert_nil blob("Binary/dog.o").encoding
   end
 
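The new sloc assertions only count lines with visible content, which is why the UTF-16LE Windows fixture comes out as 3 and the ISO-8859-8 fixture as 1. A rough sketch of that counting rule, assuming sloc is still a simple grep over `#lines`:

```ruby
# Count source lines of code as lines containing at least one
# non-whitespace character.
def sloc(lines)
  lines.grep(/\S/).size
end

sloc(["var n = 2", "", "n"])  # => 2
```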