0.0.21 (#22)
* 0.0.20

* notebook updates

* adjustment for displayHTML

* typo

* adjust for datediff

* 0.0.19

* 0.0.21 removing unused functions
orellabac authored Jan 30, 2023
1 parent ad0f720 commit 3d2de54
Showing 5 changed files with 13 additions and 22 deletions.
7 changes: 6 additions & 1 deletion CHANGE_LOG.txt
@@ -107,4 +107,9 @@ Fixing some test_cases

Version 0.0.20
--------------
Changing the DataFrame notebook integration to a more standard approach
Fixing issue with bround

Version 0.0.21
--------------
Removing functions that are now available in snowpark
Adjusting to require snowpark-python>=1.1.0
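
The helpers removed in 0.0.21 (unix_timestamp, from_unixtime, asc, desc and the null-ordering variants) are now provided by snowpark-python itself, so after upgrading they are imported from the upstream package instead of from the extensions. A minimal sketch of the replacement usage, assuming snowpark-python >= 1.1.0 and placeholder connection parameters; it is not part of this commit:

```python
# With snowpark-python >= 1.1.0 the sort helpers come from the upstream package,
# so the snowpark_extensions copies are no longer needed.
from snowflake.snowpark import Session
from snowflake.snowpark.functions import asc, desc

# Connection parameters are placeholders for a real account.
session = Session.builder.configs(
    {"account": "...", "user": "...", "password": "..."}
).create()

df = session.create_dataframe([(2, "b"), (1, "a")], schema=["id", "val"])
df.sort(asc("id")).show()   # ascending sort expression, now built in
df.sort(desc("id")).show()  # descending sort expression, now built in
```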
8 changes: 4 additions & 4 deletions README.md
@@ -198,8 +198,8 @@ df.group_by("ID").applyInPandas(
| Name | Description |
|------------------------------|-------------------------------------------------------------------------------------|
| functions.array_sort | sorts the input array in ascending order or descending order. The elements of the input array must be orderable. Null elements will be placed at the end of the returned array. |
| functions.unix_timestamp | returns the UNIX timestamp of current time. |
| functions.from_unixtimestamp | can be used to convert UNIX time to Snowflake timestamp |
| ~~functions.unix_timestamp~~ | ~~returns the UNIX timestamp of current time.~~ **Available in snowpark-python >= 1.1.0** |
| ~~functions.from_unixtimestamp~~ | ~~can be used to convert UNIX time to Snowflake timestamp~~ **Available in snowpark-python >= 1.1.0** |
| functions.format_number | formats numbers using the specified number of decimal places |
| functions.reverse | returns a reversed string |
| functions.explode | returns a new row for each element in the given array |
@@ -213,8 +213,8 @@ df.group_by("ID").applyInPandas(
| functions.date_add | returns the date that is n days after |
| functions.date_sub | returns the date that is n days before |
| functions.regexp_extract | extracts a specific group matched by a regex from the specified string column. |
| functions.asc | returns a sort expression based on the ascending order of the given column name. |
| functions.desc | returns a sort expression based on the descending order of the given column name. |
| ~~functions.asc~~ | ~~returns a sort expression based on the ascending order of the given column name.~~ **Available in snowpark-python >=1.1.0** |
| ~~functions.desc~~ | ~~returns a sort expression based on the descending order of the given column name.~~ **Available in snowpark-python >=1.1.0** |
| functions.flatten | creates a single array from an array of arrays |
| functions.sort_array | sorts the input array in ascending or descending order according to the natural ordering of the array elements. Null elements will be placed at the beginning of the returned array in ascending order or at the end of the returned array in descending order |
| functions.map_values | Returns an unordered array containing the values of the map. |
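
The helpers that remain in the table keep their existing behavior. A minimal usage sketch, assuming snowpark_extensions is imported (which registers the helpers on snowflake.snowpark.functions, as in functions_extensions.py below), an already-created session, and illustrative column names:

```python
import snowpark_extensions  # registers the extra helpers on snowflake.snowpark.functions
from snowflake.snowpark import functions as F

# `session` is assumed to exist already; data and column names are illustrative.
df = session.create_dataframe(
    [("2015-04-08", "2015-05-10", 1234.5678)], schema=["d1", "d2", "amount"]
)
df.select(
    F.daydiff(F.to_date(df.d2), F.to_date(df.d1)).alias("days_between"),  # whole days between the two dates
    F.format_number(df.amount, 2).alias("amount_fmt"),                    # comma-grouped string with 2 decimals
    F.regexp_extract(df.d1, r"(\d+)-(\d+)", 1).alias("year"),             # first captured group of the match
).show()
```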
4 changes: 2 additions & 2 deletions setup.py
@@ -5,7 +5,7 @@
this_directory = Path(__file__).parent
long_description = (this_directory / "README.md").read_text()

VERSION = '0.0.20'
VERSION = '0.0.21'

setup(name='snowpark_extensions',
version=VERSION,
@@ -14,7 +14,7 @@
long_description_content_type='text/markdown',
url='http://github.com/MobilizeNet/snowpark-extensions-py',
author='mauricio.rojas',
install_requires=['snowflake-snowpark-python[pandas]',
install_requires=['snowflake-snowpark-python[pandas]>=1.1.0',
'shortuuid', 'nest_asyncio', 'jinja2', 'rich'],
author_email='[email protected]',
packages=['snowpark_extensions'],
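
Since setup.py now pins snowflake-snowpark-python[pandas]>=1.1.0, a quick environment check can confirm the floor is met before moving to 0.0.21. This is a sketch using importlib.metadata, not something shipped by the package:

```python
# Sketch: confirm the installed snowpark-python satisfies the new >= 1.1.0 floor.
from importlib.metadata import version

installed = version("snowflake-snowpark-python")
major, minor = (int(part) for part in installed.split(".")[:2])
assert (major, minor) >= (1, 1), f"snowpark-python {installed} is older than 1.1.0"
```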
13 changes: 0 additions & 13 deletions snowpark_extensions/functions_extensions.py
@@ -51,13 +51,6 @@ def regexp_extract(value:ColumnOrLiteralStr,regexp:ColumnOrLiteralStr,idx:int) -
# we add .* to the expression if needed
return coalesce(call_builtin('regexp_substr',value,regexp,lit(1),lit(1),lit('e'),idx),lit(''))

def unix_timestamp(col):
return call_builtin("DATE_PART","epoch_second",col)

def from_unixtime(col):
col = _to_col_if_str(col,"from_unixtime")
return F.to_timestamp(col).alias('ts')

def format_number(col,d):
col = _to_col_if_str(col,"format_number")
return F.to_varchar(col,'999,999,999,999,999.' + '0'*d)
@@ -295,17 +288,11 @@ def _bround(col: Column, scale: int = 0):
F.array_distinct = array_distinct
F.regexp_extract = regexp_extract
F.create_map = create_map
F.unix_timestamp = unix_timestamp
F.from_unixtime = from_unixtime
F.format_number = format_number
F.reverse = reverse
F.daydiff = daydiff
F.date_add = date_add
F.date_sub = date_sub
F.asc = lambda col: _to_col_if_str(col, "asc").asc()
F.desc = lambda col: _to_col_if_str(col, "desc").desc()
F.asc_nulls_first = lambda col: _to_col_if_str(col, "asc_nulls_first").asc()
F.desc_nulls_first = lambda col: _to_col_if_str(col, "desc_nulls_first").asc()
F.sort_array = _sort_array
F.array_sort = _array_sort
F.struct = _struct
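
functions_extensions.py registers every helper by plain attribute assignment on snowflake.snowpark.functions, so the deleted helpers simply lose their assignment. A project still pinned below snowpark-python 1.1.0 could keep one of them alive locally with the same pattern; this is a sketch built from the deleted implementation shown above, not something the 0.0.21 package provides:

```python
# Same monkey-patching pattern the module uses, applied outside the package.
from snowflake.snowpark import functions as F
from snowflake.snowpark.functions import call_builtin

def unix_timestamp(col):
    # Epoch seconds of a timestamp column, as the removed helper computed it.
    return call_builtin("DATE_PART", "epoch_second", col)

F.unix_timestamp = unix_timestamp  # register it alongside the other extensions
```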
3 changes: 1 addition & 2 deletions tests/test_functions.py
@@ -224,13 +224,12 @@ def test_struct():
assert re.sub(r"\s","",res[0].STRUCT) == '{"A":80,"B":"Bob"}'
assert re.sub(r"\s","",res[1].STRUCT) == '{"A":null,"B":"Alice"}'

def test_datediff():
def test_daydiff():
session = Session.builder.from_snowsql().getOrCreate()
df = session.createDataFrame([('2015-04-08','2015-05-10')], ['d1', 'd2'])
res = df.select(F.daydiff(F.to_date(df.d2), F.to_date(df.d1)).alias('diff')).collect()
assert res[0].DIFF == 32


def test_bround():
session = Session.builder.from_snowsql().getOrCreate()
data0 = [(1.5,0),
